@@ -1,846 +1,846 b''
|
1 | 1 | """A simple configuration system. |
|
2 | 2 | |
|
3 | 3 | Inheritance diagram: |
|
4 | 4 | |
|
5 | 5 | .. inheritance-diagram:: IPython.config.loader |
|
6 | 6 | :parts: 3 |
|
7 | 7 | |
|
8 | 8 | Authors |
|
9 | 9 | ------- |
|
10 | 10 | * Brian Granger |
|
11 | 11 | * Fernando Perez |
|
12 | 12 | * Min RK |
|
13 | 13 | """ |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Copyright (C) 2008-2011 The IPython Development Team |
|
17 | 17 | # |
|
18 | 18 | # Distributed under the terms of the BSD License. The full license is in |
|
19 | 19 | # the file COPYING, distributed as part of this software. |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | # Imports |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | |
|
26 | 26 | import argparse |
|
27 | 27 | import copy |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import re |
|
31 | 31 | import sys |
|
32 | 32 | import json |
|
33 | 33 | |
|
34 | 34 | from IPython.utils.path import filefind, get_ipython_dir |
|
35 | 35 | from IPython.utils import py3compat |
|
36 | 36 | from IPython.utils.encoding import DEFAULT_ENCODING |
|
37 | 37 | from IPython.utils.py3compat import unicode_type, iteritems |
|
38 | 38 | from IPython.utils.traitlets import HasTraits, List, Any |
|
39 | 39 | |
|
40 | 40 | #----------------------------------------------------------------------------- |
|
41 | 41 | # Exceptions |
|
42 | 42 | #----------------------------------------------------------------------------- |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class ConfigError(Exception): |
|
46 | 46 | pass |
|
47 | 47 | |
|
48 | 48 | class ConfigLoaderError(ConfigError): |
|
49 | 49 | pass |
|
50 | 50 | |
|
51 | 51 | class ConfigFileNotFound(ConfigError): |
|
52 | 52 | pass |
|
53 | 53 | |
|
54 | 54 | class ArgumentError(ConfigLoaderError): |
|
55 | 55 | pass |
|
56 | 56 | |
|
57 | 57 | #----------------------------------------------------------------------------- |
|
58 | 58 | # Argparse fix |
|
59 | 59 | #----------------------------------------------------------------------------- |
|
60 | 60 | |
|
61 | 61 | # Unfortunately argparse by default prints help messages to stderr instead of |
|
62 | 62 | # stdout. This makes it annoying to capture long help screens at the command |
|
63 | 63 | # line, since one must know how to pipe stderr, which many users don't know how |
|
64 | 64 | # to do. So we override the print_help method with one that defaults to |
|
65 | 65 | # stdout and use our class instead. |
|
66 | 66 | |
|
67 | 67 | class ArgumentParser(argparse.ArgumentParser): |
|
68 | 68 | """Simple argparse subclass that prints help to stdout by default.""" |
|
69 | 69 | |
|
70 | 70 | def print_help(self, file=None): |
|
71 | 71 | if file is None: |
|
72 | 72 | file = sys.stdout |
|
73 | 73 | return super(ArgumentParser, self).print_help(file) |
|
74 | 74 | |
|
75 | 75 | print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__ |
|
76 | 76 | |
|
77 | 77 | #----------------------------------------------------------------------------- |
|
78 | 78 | # Config class for holding config information |
|
79 | 79 | #----------------------------------------------------------------------------- |
|
80 | 80 | |
|
81 | 81 | class LazyConfigValue(HasTraits): |
|
82 | 82 | """Proxy object for exposing methods on configurable containers |
|
83 | 83 | |
|
84 | 84 | Exposes: |
|
85 | 85 | |
|
86 | 86 | - append, extend, insert on lists |
|
87 | 87 | - update on dicts |
|
88 | 88 | - update, add on sets |
|
89 | 89 | """ |
|
90 | 90 | |
|
91 | 91 | _value = None |
|
92 | 92 | |
|
93 | 93 | # list methods |
|
94 | 94 | _extend = List() |
|
95 | 95 | _prepend = List() |
|
96 | 96 | |
|
97 | 97 | def append(self, obj): |
|
98 | 98 | self._extend.append(obj) |
|
99 | 99 | |
|
100 | 100 | def extend(self, other): |
|
101 | 101 | self._extend.extend(other) |
|
102 | 102 | |
|
103 | 103 | def prepend(self, other): |
|
104 | 104 | """like list.extend, but for the front""" |
|
105 | 105 | self._prepend[:0] = other |
|
106 | 106 | |
|
107 | 107 | _inserts = List() |
|
108 | 108 | def insert(self, index, other): |
|
109 | 109 | if not isinstance(index, int): |
|
110 | 110 | raise TypeError("An integer is required") |
|
111 | 111 | self._inserts.append((index, other)) |
|
112 | 112 | |
|
113 | 113 | # dict methods |
|
114 | 114 | # update is used for both dict and set |
|
115 | 115 | _update = Any() |
|
116 | 116 | def update(self, other): |
|
117 | 117 | if self._update is None: |
|
118 | 118 | if isinstance(other, dict): |
|
119 | 119 | self._update = {} |
|
120 | 120 | else: |
|
121 | 121 | self._update = set() |
|
122 | 122 | self._update.update(other) |
|
123 | 123 | |
|
124 | 124 | # set methods |
|
125 | 125 | def add(self, obj): |
|
126 | 126 | self.update({obj}) |
|
127 | 127 | |
|
128 | 128 | def get_value(self, initial): |
|
129 | 129 | """construct the value from the initial one |
|
130 | 130 | |
|
131 | 131 | after applying any insert / extend / update changes |
|
132 | 132 | """ |
|
133 | 133 | if self._value is not None: |
|
134 | 134 | return self._value |
|
135 | 135 | value = copy.deepcopy(initial) |
|
136 | 136 | if isinstance(value, list): |
|
137 | 137 | for idx, obj in self._inserts: |
|
138 | 138 | value.insert(idx, obj) |
|
139 | 139 | value[:0] = self._prepend |
|
140 | 140 | value.extend(self._extend) |
|
141 | 141 | |
|
142 | 142 | elif isinstance(value, dict): |
|
143 | 143 | if self._update: |
|
144 | 144 | value.update(self._update) |
|
145 | 145 | elif isinstance(value, set): |
|
146 | 146 | if self._update: |
|
147 | 147 | value.update(self._update) |
|
148 | 148 | self._value = value |
|
149 | 149 | return value |
|
150 | 150 | |
|
151 | 151 | def to_dict(self): |
|
152 | 152 | """return JSONable dict form of my data |
|
153 | 153 | |
|
154 | 154 | Currently update as dict or set, extend, prepend as lists, and inserts as list of tuples. |
|
155 | 155 | """ |
|
156 | 156 | d = {} |
|
157 | 157 | if self._update: |
|
158 | 158 | d['update'] = self._update |
|
159 | 159 | if self._extend: |
|
160 | 160 | d['extend'] = self._extend |
|
161 | 161 | if self._prepend: |
|
162 | 162 | d['prepend'] = self._prepend |
|
163 | 163 | elif self._inserts: |
|
164 | 164 | d['inserts'] = self._inserts |
|
165 | 165 | return d |
|
166 | 166 | |
|
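A minimal sketch of the proxy above in action, assuming the IPython.config.loader import path of this file (the trait and extension names are illustrative):

# Container methods on a not-yet-defined Config entry create a LazyConfigValue;
# get_value() later replays the recorded operations on the real initial value.
from IPython.config.loader import Config

c = Config()
c.InteractiveShellApp.extensions.append('autoreload')   # no concrete value yet
lazy = c.InteractiveShellApp.extensions                  # the LazyConfigValue proxy
print(lazy.get_value(['storemagic']))                    # ['storemagic', 'autoreload']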
167 | 167 | |
|
168 | 168 | def _is_section_key(key): |
|
169 | 169 | """Is a Config key a section name (does it start with a capital)?""" |
|
170 | 170 | if key and key[0].upper()==key[0] and not key.startswith('_'): |
|
171 | 171 | return True |
|
172 | 172 | else: |
|
173 | 173 | return False |
|
174 | 174 | |
|
175 | 175 | |
|
176 | 176 | class Config(dict): |
|
177 | 177 | """An attribute based dict that can do smart merges.""" |
|
178 | 178 | |
|
179 | 179 | def __init__(self, *args, **kwds): |
|
180 | 180 | dict.__init__(self, *args, **kwds) |
|
181 | 181 | self._ensure_subconfig() |
|
182 | 182 | |
|
183 | 183 | def _ensure_subconfig(self): |
|
184 | 184 | """ensure that sub-dicts that should be Config objects are Config objects |
|
185 | 185 | |
|
186 | 186 | casts dicts that are under section keys to Config objects, |
|
187 | 187 | which is necessary for constructing Config objects from dict literals. |
|
188 | 188 | """ |
|
189 | 189 | for key in self: |
|
190 | 190 | obj = self[key] |
|
191 | 191 | if _is_section_key(key) \ |
|
192 | 192 | and isinstance(obj, dict) \ |
|
193 | 193 | and not isinstance(obj, Config): |
|
194 | 194 | setattr(self, key, Config(obj)) |
|
195 | 195 | |
|
196 | 196 | def _merge(self, other): |
|
197 | 197 | """deprecated alias, use Config.merge()""" |
|
198 | 198 | self.merge(other) |
|
199 | 199 | |
|
200 | 200 | def merge(self, other): |
|
201 | 201 | """merge another config object into this one""" |
|
202 | 202 | to_update = {} |
|
203 | 203 | for k, v in iteritems(other): |
|
204 | 204 | if k not in self: |
|
205 | 205 | to_update[k] = copy.deepcopy(v) |
|
206 | 206 | else: # I have this key |
|
207 | 207 | if isinstance(v, Config) and isinstance(self[k], Config): |
|
208 | 208 | # Recursively merge common sub Configs |
|
209 | 209 | self[k].merge(v) |
|
210 | 210 | else: |
|
211 | 211 | # Plain updates for non-Configs |
|
212 | 212 | to_update[k] = copy.deepcopy(v) |
|
213 | 213 | |
|
214 | 214 | self.update(to_update) |
|
215 | 215 | |
|
216 | 216 | def __contains__(self, key): |
|
217 | 217 | # allow nested contains of the form `"Section.key" in config` |
|
218 | 218 | if '.' in key: |
|
219 | 219 | first, remainder = key.split('.', 1) |
|
220 | 220 | if first not in self: |
|
221 | 221 | return False |
|
222 | 222 | return remainder in self[first] |
|
223 | 223 | |
|
224 | 224 | return super(Config, self).__contains__(key) |
|
225 | 225 | |
|
226 | 226 | # .has_key is deprecated for dictionaries. |
|
227 | 227 | has_key = __contains__ |
|
228 | 228 | |
|
229 | 229 | def _has_section(self, key): |
|
230 | 230 | return _is_section_key(key) and key in self |
|
231 | 231 | |
|
232 | 232 | def copy(self): |
|
233 | 233 | return type(self)(dict.copy(self)) |
|
234 | 234 | |
|
235 | 235 | def __copy__(self): |
|
236 | 236 | return self.copy() |
|
237 | 237 | |
|
238 | 238 | def __deepcopy__(self, memo): |
|
239 | 239 | import copy |
|
240 | 240 | return type(self)(copy.deepcopy(list(self.items()))) |
|
241 | 241 | |
|
242 | 242 | def __getitem__(self, key): |
|
243 | 243 | try: |
|
244 | 244 | return dict.__getitem__(self, key) |
|
245 | 245 | except KeyError: |
|
246 | 246 | if _is_section_key(key): |
|
247 | 247 | c = Config() |
|
248 | 248 | dict.__setitem__(self, key, c) |
|
249 | 249 | return c |
|
250 | 250 | elif not key.startswith('_'): |
|
251 | 251 | # undefined, create lazy value, used for container methods |
|
252 | 252 | v = LazyConfigValue() |
|
253 | 253 | dict.__setitem__(self, key, v) |
|
254 | 254 | return v |
|
255 | 255 | else: |
|
256 | 256 | raise KeyError |
|
257 | 257 | |
|
258 | 258 | def __setitem__(self, key, value): |
|
259 | 259 | if _is_section_key(key): |
|
260 | 260 | if not isinstance(value, Config): |
|
261 | 261 | raise ValueError('values whose keys begin with an uppercase ' |
|
262 | 262 | 'char must be Config instances: %r, %r' % (key, value)) |
|
263 | 263 | dict.__setitem__(self, key, value) |
|
264 | 264 | |
|
265 | 265 | def __getattr__(self, key): |
|
266 | 266 | if key.startswith('__'): |
|
267 | 267 | return dict.__getattr__(self, key) |
|
268 | 268 | try: |
|
269 | 269 | return self.__getitem__(key) |
|
270 | 270 | except KeyError as e: |
|
271 | 271 | raise AttributeError(e) |
|
272 | 272 | |
|
273 | 273 | def __setattr__(self, key, value): |
|
274 | 274 | if key.startswith('__'): |
|
275 | 275 | return dict.__setattr__(self, key, value) |
|
276 | 276 | try: |
|
277 | 277 | self.__setitem__(key, value) |
|
278 | 278 | except KeyError as e: |
|
279 | 279 | raise AttributeError(e) |
|
280 | 280 | |
|
281 | 281 | def __delattr__(self, key): |
|
282 | 282 | if key.startswith('__'): |
|
283 | 283 | return dict.__delattr__(self, key) |
|
284 | 284 | try: |
|
285 | 285 | dict.__delitem__(self, key) |
|
286 | 286 | except KeyError as e: |
|
287 | 287 | raise AttributeError(e) |
|
288 | 288 | |
|
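A short, hedged sketch of the attribute access, recursive merge, and dotted containment described above (the class and trait names are illustrative):

# Config behaves like an attribute-access dict whose capitalized keys are sub-Configs.
from IPython.config.loader import Config

a = Config()
a.InteractiveShell.autocall = 2                 # auto-creates the sub-Config
b = Config({'InteractiveShell': {'colors': 'LightBG'}})
a.merge(b)                                      # common sub-Configs merge recursively
print(a.InteractiveShell.autocall)              # 2 (preserved from `a`)
print(a.InteractiveShell.colors)                # 'LightBG' (merged in from `b`)
print('InteractiveShell.colors' in a)           # True, via the dotted __contains__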
289 | 289 | |
|
290 | 290 | #----------------------------------------------------------------------------- |
|
291 | 291 | # Config loading classes |
|
292 | 292 | #----------------------------------------------------------------------------- |
|
293 | 293 | |
|
294 | 294 | |
|
295 | 295 | class ConfigLoader(object): |
|
296 | 296 | """An object for loading configurations from just about anywhere. |
|
297 | 297 | |
|
298 | 298 | The resulting configuration is packaged as a :class:`Config`. |
|
299 | 299 | |
|
300 | 300 | Notes |
|
301 | 301 | ----- |
|
302 | 302 | A :class:`ConfigLoader` does one thing: load a config from a source |
|
303 | 303 | (file, command line arguments) and return the data as a :class:`Config` object. |
|
304 | 304 | There are lots of things that :class:`ConfigLoader` does not do. It does |
|
305 | 305 | not implement complex logic for finding config files. It does not handle |
|
306 | 306 | default values or merge multiple configs. These things need to be |
|
307 | 307 | handled elsewhere. |
|
308 | 308 | """ |
|
309 | 309 | |
|
310 | 310 | def _log_default(self): |
|
311 | 311 | from IPython.config.application import Application |
|
312 | 312 | if Application.initialized(): |
|
313 | 313 | return Application.instance().log |
|
314 | 314 | else: |
|
315 | 315 | return logging.getLogger() |
|
316 | 316 | |
|
317 | 317 | def __init__(self, log=None): |
|
318 | 318 | """A base class for config loaders. |
|
319 | 319 | |
|
320 | 320 | log : instance of :class:`logging.Logger` to use. |
|
321 | 321 | By default the logger of :meth:`IPython.config.application.Application.instance()` |
|
322 | 322 | will be used |
|
323 | 323 | |
|
324 | 324 | Examples |
|
325 | 325 | -------- |
|
326 | 326 | |
|
327 | 327 | >>> cl = ConfigLoader() |
|
328 | 328 | >>> config = cl.load_config() |
|
329 | 329 | >>> config |
|
330 | 330 | {} |
|
331 | 331 | """ |
|
332 | 332 | self.clear() |
|
333 | 333 | if log is None: |
|
334 | 334 | self.log = self._log_default() |
|
335 | 335 | self.log.debug('Using default logger') |
|
336 | 336 | else: |
|
337 | 337 | self.log = log |
|
338 | 338 | |
|
339 | 339 | def clear(self): |
|
340 | 340 | self.config = Config() |
|
341 | 341 | |
|
342 | 342 | def load_config(self): |
|
343 | 343 | """Load a config from somewhere, return a :class:`Config` instance. |
|
344 | 344 | |
|
345 | 345 | Usually, this will cause self.config to be set and then returned. |
|
346 | 346 | However, in most cases, :meth:`ConfigLoader.clear` should be called |
|
347 | 347 | to erase any previous state. |
|
348 | 348 | """ |
|
349 | 349 | self.clear() |
|
350 | 350 | return self.config |
|
351 | 351 | |
|
352 | 352 | |
|
353 | 353 | class FileConfigLoader(ConfigLoader): |
|
354 | 354 | """A base class for file based configurations. |
|
355 | 355 | |
|
356 | 356 | As we add more file based config loaders, the common logic should go |
|
357 | 357 | here. |
|
358 | 358 | """ |
|
359 | 359 | |
|
360 | 360 | def __init__(self, filename, path=None, **kw): |
|
361 | 361 | """Build a config loader for a filename and path. |
|
362 | 362 | |
|
363 | 363 | Parameters |
|
364 | 364 | ---------- |
|
365 | 365 | filename : str |
|
366 | 366 | The file name of the config file. |
|
367 | 367 | path : str, list, tuple |
|
368 | 368 | The path to search for the config file on, or a sequence of |
|
369 | 369 | paths to try in order. |
|
370 | 370 | """ |
|
371 | 371 | super(FileConfigLoader, self).__init__(**kw) |
|
372 | 372 | self.filename = filename |
|
373 | 373 | self.path = path |
|
374 | 374 | self.full_filename = '' |
|
375 | 375 | |
|
376 | 376 | def _find_file(self): |
|
377 | 377 | """Try to find the file by searching the paths.""" |
|
378 | 378 | self.full_filename = filefind(self.filename, self.path) |
|
379 | 379 | |
|
380 | 380 | class JSONFileConfigLoader(FileConfigLoader): |
|
381 | 381 | """A Json file loader for config""" |
|
382 | 382 | |
|
383 | 383 | def load_config(self): |
|
384 | 384 | """Load the config from a file and return it as a Config object.""" |
|
385 | 385 | self.clear() |
|
386 | 386 | try: |
|
387 | 387 | self._find_file() |
|
388 | 388 | except IOError as e: |
|
389 | 389 | raise ConfigFileNotFound(str(e)) |
|
390 | 390 | dct = self._read_file_as_dict() |
|
391 | 391 | self.config = self._convert_to_config(dct) |
|
392 | 392 | return self.config |
|
393 | 393 | |
|
394 | 394 | def _read_file_as_dict(self): |
|
395 | 395 | with open(self.full_filename) as f: |
|
396 | 396 | return json.load(f) |
|
397 | 397 | |
|
398 | 398 | def _convert_to_config(self, dictionary): |
|
399 | 399 | if 'version' in dictionary: |
|
400 | 400 | version = dictionary.pop('version') |
|
401 | 401 | else: |
|
402 | 402 | version = 1 |
|
403 | 403 | self.log.warn("Unrecognized JSON config file version, assuming version {}".format(version)) |
|
404 | 404 | |
|
405 | 405 | if version == 1: |
|
406 | 406 | return Config(dictionary) |
|
407 | 407 | else: |
|
408 | 408 | raise ValueError('Unknown version of JSON config file: {version}'.format(version=version)) |
|
409 | 409 | |
|
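A hedged example of the JSON loader above, using a throwaway directory and a made-up section name:

# Write a version-1 JSON config file, then load it back as a Config.
import json, os, tempfile
from IPython.config.loader import JSONFileConfigLoader

d = tempfile.mkdtemp()
with open(os.path.join(d, 'sample_config.json'), 'w') as f:
    json.dump({'version': 1, 'MyApp': {'answer': 42}}, f)

config = JSONFileConfigLoader('sample_config.json', path=d).load_config()
print(config.MyApp.answer)   # 42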
410 | 410 | |
|
411 | 411 | class PyFileConfigLoader(FileConfigLoader): |
|
412 | 412 | """A config loader for pure python files. |
|
413 | 413 | |
|
414 | 414 | This is responsible for locating a Python config file by filename and |
|
415 | 415 | path, then executing it to construct a Config object. |
|
416 | 416 | """ |
|
417 | 417 | |
|
418 | 418 | def load_config(self): |
|
419 | 419 | """Load the config from a file and return it as a Config object.""" |
|
420 | 420 | self.clear() |
|
421 | 421 | try: |
|
422 | 422 | self._find_file() |
|
423 | 423 | except IOError as e: |
|
424 | 424 | raise ConfigFileNotFound(str(e)) |
|
425 | 425 | self._read_file_as_dict() |
|
426 | 426 | return self.config |
|
427 | 427 | |
|
428 | 428 | |
|
429 | 429 | def _read_file_as_dict(self): |
|
430 | 430 | """Load the config file into self.config, with recursive loading.""" |
|
431 | 431 | # This closure is made available in the namespace that is used |
|
432 | 432 | # to exec the config file. It allows users to call |
|
433 | 433 | # load_subconfig('myconfig.py') to load config files recursively. |
|
434 | 434 | # It needs to be a closure because it has references to self.path |
|
435 | 435 | # and self.config. The sub-config is loaded with the same path |
|
436 | 436 | # as the parent, but it uses an empty config which is then merged |
|
437 | 437 | # with the parents. |
|
438 | 438 | |
|
439 | 439 | # If a profile is specified, the config file will be loaded |
|
440 | 440 | # from that profile |
|
441 | 441 | |
|
442 | 442 | def load_subconfig(fname, profile=None): |
|
443 | 443 | # import here to prevent circular imports |
|
444 | 444 | from IPython.core.profiledir import ProfileDir, ProfileDirError |
|
445 | 445 | if profile is not None: |
|
446 | 446 | try: |
|
447 | 447 | profile_dir = ProfileDir.find_profile_dir_by_name( |
|
448 | 448 | get_ipython_dir(), |
|
449 | 449 | profile, |
|
450 | 450 | ) |
|
451 | 451 | except ProfileDirError: |
|
452 | 452 | return |
|
453 | 453 | path = profile_dir.location |
|
454 | 454 | else: |
|
455 | 455 | path = self.path |
|
456 | 456 | loader = PyFileConfigLoader(fname, path) |
|
457 | 457 | try: |
|
458 | 458 | sub_config = loader.load_config() |
|
459 | 459 | except ConfigFileNotFound: |
|
460 | 460 | # Pass silently if the sub config is not there. This happens |
|
461 | 461 | # when a user is using a profile, but not the default config. |
|
462 | 462 | pass |
|
463 | 463 | else: |
|
464 | 464 | self.config.merge(sub_config) |
|
465 | 465 | |
|
466 | 466 | # Again, this needs to be a closure and should be used in config |
|
467 | 467 | # files to get the config being loaded. |
|
468 | 468 | def get_config(): |
|
469 | 469 | return self.config |
|
470 | 470 | |
|
471 | 471 | namespace = dict( |
|
472 | 472 | load_subconfig=load_subconfig, |
|
473 | 473 | get_config=get_config, |
|
474 | 474 | __file__=self.full_filename, |
|
475 | 475 | ) |
|
476 | 476 | fs_encoding = sys.getfilesystemencoding() or 'ascii' |
|
477 | 477 | conf_filename = self.full_filename.encode(fs_encoding) |
|
478 | 478 | py3compat.execfile(conf_filename, namespace) |
|
479 | 479 | |
|
480 | 480 | |
|
481 | 481 | class CommandLineConfigLoader(ConfigLoader): |
|
482 | 482 | """A config loader for command line arguments. |
|
483 | 483 | |
|
484 | 484 | As we add more command line based loaders, the common logic should go |
|
485 | 485 | here. |
|
486 | 486 | """ |
|
487 | 487 | |
|
488 | 488 | def _exec_config_str(self, lhs, rhs): |
|
489 | 489 | """execute self.config.<lhs> = <rhs> |
|
490 | 490 | |
|
491 | 491 | * expands ~ with expanduser |
|
492 | 492 | * tries to assign with raw eval, otherwise assigns with just the string, |
|
493 | 493 | allowing `--C.a=foobar` and `--C.a="foobar"` to be equivalent. *Not* |
|
494 | 494 | equivalent are `--C.a=4` and `--C.a='4'`. |
|
495 | 495 | """ |
|
496 | 496 | rhs = os.path.expanduser(rhs) |
|
497 | 497 | try: |
|
498 | 498 | # Try to see if regular Python syntax will work. This |
|
499 | 499 | # won't handle strings as the quote marks are removed |
|
500 | 500 | # by the system shell. |
|
501 | 501 | value = eval(rhs) |
|
502 | 502 | except (NameError, SyntaxError): |
|
503 | 503 | # This case happens if the rhs is a string. |
|
504 | 504 | value = rhs |
|
505 | 505 | |
|
506 | 506 | exec(u'self.config.%s = value' % lhs) |
|
507 | 507 | |
|
508 | 508 | def _load_flag(self, cfg): |
|
509 | 509 | """update self.config from a flag, which can be a dict or Config""" |
|
510 | 510 | if isinstance(cfg, (dict, Config)): |
|
511 | 511 | # don't clobber whole config sections, update |
|
512 | 512 | # each section from config: |
|
513 | 513 | for sec,c in iteritems(cfg): |
|
514 | 514 | self.config[sec].update(c) |
|
515 | 515 | else: |
|
516 | 516 | raise TypeError("Invalid flag: %r" % cfg) |
|
517 | 517 | |
|
518 | 518 | # raw --identifier=value pattern |
|
519 | 519 | # but *also* accept '-' as wordsep, for aliases |
|
520 | 520 | # accepts: --foo=a |
|
521 | 521 | # --Class.trait=value |
|
522 | 522 | # --alias-name=value |
|
523 | 523 | # rejects: -foo=value |
|
524 | 524 | # --foo |
|
525 | 525 | # --Class.trait |
|
526 | 526 | kv_pattern = re.compile(r'\-\-[A-Za-z][\w\-]*(\.[\w\-]+)*\=.*') |
|
527 | 527 | |
|
528 | 528 | # just flags, no assignments, with two *or one* leading '-' |
|
529 | 529 | # accepts: --foo |
|
530 | 530 | # -foo-bar-again |
|
531 | 531 | # rejects: --anything=anything |
|
532 | 532 | # --two.word |
|
533 | 533 | |
|
534 | 534 | flag_pattern = re.compile(r'\-\-?\w+[\-\w]*$') |
|
535 | 535 | |
|
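A few spot checks of what the two patterns above accept and reject (illustrative option names):

# kv_pattern matches --name=value assignments; flag_pattern matches bare -f/--flag forms.
from IPython.config.loader import kv_pattern, flag_pattern

print(bool(kv_pattern.match('--InteractiveShell.autocall=2')))   # True:  --Class.trait=value
print(bool(kv_pattern.match('--foo')))                           # False: no '=' assignment
print(bool(flag_pattern.match('--no-banner')))                   # True:  plain flag
print(bool(flag_pattern.match('-i')))                            # True:  single leading '-'
print(bool(flag_pattern.match('--a=b')))                         # False: assignments are not flags
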
536 | 536 | class KeyValueConfigLoader(CommandLineConfigLoader): |
|
537 | 537 | """A config loader that loads key value pairs from the command line. |
|
538 | 538 | |
|
539 | 539 | This allows command line options to be given in the following form:: |
|
540 | 540 | |
|
541 | 541 | ipython --profile="foo" --InteractiveShell.autocall=False |
|
542 | 542 | """ |
|
543 | 543 | |
|
544 | 544 | def __init__(self, argv=None, aliases=None, flags=None, **kw): |
|
545 | 545 | """Create a key value pair config loader. |
|
546 | 546 | |
|
547 | 547 | Parameters |
|
548 | 548 | ---------- |
|
549 | 549 | argv : list |
|
550 | 550 | A list that has the form of sys.argv[1:] which has unicode |
|
551 | 551 | elements of the form u"key=value". If this is None (default), |
|
552 | 552 | then sys.argv[1:] will be used. |
|
553 | 553 | aliases : dict |
|
554 | 554 | A dict of aliases for configurable traits. |
|
555 | 555 | Keys are the short aliases, Values are the resolved trait. |
|
556 | 556 | Of the form: `{'alias' : 'Configurable.trait'}` |
|
557 | 557 | flags : dict |
|
558 | 558 | A dict of flags, keyed by str name. Values can be Config objects, |
|
559 | 559 | dicts, or "key=value" strings. If Config or dict, when the flag |
|
560 | 560 | is triggered, the flag is loaded as `self.config.update(m)`. |
|
561 | 561 | |
|
562 | 562 | Returns |
|
563 | 563 | ------- |
|
564 | 564 | config : Config |
|
565 | 565 | The resulting Config object. |
|
566 | 566 | |
|
567 | 567 | Examples |
|
568 | 568 | -------- |
|
569 | 569 | |
|
570 | 570 | >>> from IPython.config.loader import KeyValueConfigLoader |
|
571 | 571 | >>> cl = KeyValueConfigLoader() |
|
572 | 572 | >>> d = cl.load_config(["--A.name='brian'","--B.number=0"]) |
|
573 | 573 | >>> sorted(d.items()) |
|
574 | 574 | [('A', {'name': 'brian'}), ('B', {'number': 0})] |
|
575 | 575 | """ |
|
576 | 576 | super(KeyValueConfigLoader, self).__init__(**kw) |
|
577 | 577 | if argv is None: |
|
578 | 578 | argv = sys.argv[1:] |
|
579 | 579 | self.argv = argv |
|
580 | 580 | self.aliases = aliases or {} |
|
581 | 581 | self.flags = flags or {} |
|
582 | 582 | |
|
583 | 583 | |
|
584 | 584 | def clear(self): |
|
585 | 585 | super(KeyValueConfigLoader, self).clear() |
|
586 | 586 | self.extra_args = [] |
|
587 | 587 | |
|
588 | 588 | |
|
589 | 589 | def _decode_argv(self, argv, enc=None): |
|
590 | 590 | """decode argv if bytes, using stdin.encoding, falling back on default enc""" |
|
591 | 591 | uargv = [] |
|
592 | 592 | if enc is None: |
|
593 | 593 | enc = DEFAULT_ENCODING |
|
594 | 594 | for arg in argv: |
|
595 | 595 | if not isinstance(arg, unicode_type): |
|
596 | 596 | # only decode if not already decoded |
|
597 | 597 | arg = arg.decode(enc) |
|
598 | 598 | uargv.append(arg) |
|
599 | 599 | return uargv |
|
600 | 600 | |
|
601 | 601 | |
|
602 | 602 | def load_config(self, argv=None, aliases=None, flags=None): |
|
603 | 603 | """Parse the configuration and generate the Config object. |
|
604 | 604 | |
|
605 | 605 | After loading, any arguments that are not key-value or |
|
606 | 606 | flags will be stored in self.extra_args - a list of |
|
607 | 607 | unparsed command-line arguments. This is used for |
|
608 | 608 | arguments such as input files or subcommands. |
|
609 | 609 | |
|
610 | 610 | Parameters |
|
611 | 611 | ---------- |
|
612 | 612 | argv : list, optional |
|
613 | 613 | A list that has the form of sys.argv[1:] which has unicode |
|
614 | 614 | elements of the form u"key=value". If this is None (default), |
|
615 | 615 | then self.argv will be used. |
|
616 | 616 | aliases : dict |
|
617 | 617 | A dict of aliases for configurable traits. |
|
618 | 618 | Keys are the short aliases, Values are the resolved trait. |
|
619 | 619 | Of the form: `{'alias' : 'Configurable.trait'}` |
|
620 | 620 | flags : dict |
|
621 | 621 | A dict of flags, keyed by str name. Values can be Config objects |
|
622 | 622 | or dicts. When the flag is triggered, the config is loaded as |
|
623 | 623 | `self.config.update(cfg)`. |
|
624 | 624 | """ |
|
625 | 625 | self.clear() |
|
626 | 626 | if argv is None: |
|
627 | 627 | argv = self.argv |
|
628 | 628 | if aliases is None: |
|
629 | 629 | aliases = self.aliases |
|
630 | 630 | if flags is None: |
|
631 | 631 | flags = self.flags |
|
632 | 632 | |
|
633 | 633 | # ensure argv is a list of unicode strings: |
|
634 | 634 | uargv = self._decode_argv(argv) |
|
635 | 635 | for idx,raw in enumerate(uargv): |
|
636 | 636 | # strip leading '-' |
|
637 | 637 | item = raw.lstrip('-') |
|
638 | 638 | |
|
639 | 639 | if raw == '--': |
|
640 | 640 | # don't parse arguments after '--' |
|
641 | 641 | # this is useful for relaying arguments to scripts, e.g. |
|
642 | 642 | # ipython -i foo.py --matplotlib=qt -- args after '--' go-to-foo.py |
|
643 | 643 | self.extra_args.extend(uargv[idx+1:]) |
|
644 | 644 | break |
|
645 | 645 | |
|
646 | 646 | if kv_pattern.match(raw): |
|
647 | 647 | lhs,rhs = item.split('=',1) |
|
648 | 648 | # Substitute longnames for aliases. |
|
649 | 649 | if lhs in aliases: |
|
650 | 650 | lhs = aliases[lhs] |
|
651 | 651 | if '.' not in lhs: |
|
652 | 652 | # probably a mistyped alias, but not technically illegal |
|
653 | 653 | self.log.warn("Unrecognized alias: '%s', it will probably have no effect.", raw) |
|
654 | 654 | try: |
|
655 | 655 | self._exec_config_str(lhs, rhs) |
|
656 | 656 | except Exception: |
|
657 | 657 | raise ArgumentError("Invalid argument: '%s'" % raw) |
|
658 | 658 | |
|
659 | 659 | elif flag_pattern.match(raw): |
|
660 | 660 | if item in flags: |
|
661 | 661 | cfg,help = flags[item] |
|
662 | 662 | self._load_flag(cfg) |
|
663 | 663 | else: |
|
664 | 664 | raise ArgumentError("Unrecognized flag: '%s'"%raw) |
|
665 | 665 | elif raw.startswith('-'): |
|
666 | 666 | kv = '--'+item |
|
667 | 667 | if kv_pattern.match(kv): |
|
668 | 668 | raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv)) |
|
669 | 669 | else: |
|
670 | 670 | raise ArgumentError("Invalid argument: '%s'"%raw) |
|
671 | 671 | else: |
|
672 | 672 | # keep all args that aren't valid in a list, |
|
673 | 673 | # in case our parent knows what to do with them. |
|
674 | 674 | self.extra_args.append(item) |
|
675 | 675 | return self.config |
|
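The docstring example above shows plain --Class.trait=value pairs; the sketch below adds an invented alias, an invented flag, and an unrecognised positional argument to show how each is handled:

# Aliases expand to Class.trait names, flags merge a config fragment,
# and anything else is kept in extra_args for the caller.
from IPython.config.loader import KeyValueConfigLoader

aliases = {'colors': 'InteractiveShell.colors'}
flags = {'debug': ({'Application': {'log_level': 10}}, "set log level to DEBUG")}

cl = KeyValueConfigLoader(aliases=aliases, flags=flags)
cfg = cl.load_config(['--colors=LightBG', '--debug', 'notebook.ipynb'])
print(cfg.InteractiveShell.colors)   # 'LightBG'
print(cfg.Application.log_level)     # 10
print(cl.extra_args)                 # ['notebook.ipynb']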
676 | 676 | |
|
677 | 677 | class ArgParseConfigLoader(CommandLineConfigLoader): |
|
678 | 678 | """A loader that uses the argparse module to load from the command line.""" |
|
679 | 679 | |
|
680 | 680 | def __init__(self, argv=None, aliases=None, flags=None, log=None, *parser_args, **parser_kw): |
|
681 | 681 | """Create a config loader for use with argparse. |
|
682 | 682 | |
|
683 | 683 | Parameters |
|
684 | 684 | ---------- |
|
685 | 685 | |
|
686 | 686 | argv : optional, list |
|
687 | 687 | If given, used to read command-line arguments from, otherwise |
|
688 | 688 | sys.argv[1:] is used. |
|
689 | 689 | |
|
690 | 690 | parser_args : tuple |
|
691 | 691 | A tuple of positional arguments that will be passed to the |
|
692 | 692 | constructor of :class:`argparse.ArgumentParser`. |
|
693 | 693 | |
|
694 | 694 | parser_kw : dict |
|
695 | 695 | A dict of keyword arguments that will be passed to the |
|
696 | 696 | constructor of :class:`argparse.ArgumentParser`. |
|
697 | 697 | |
|
698 | 698 | Returns |
|
699 | 699 | ------- |
|
700 | 700 | config : Config |
|
701 | 701 | The resulting Config object. |
|
702 | 702 | """ |
|
703 | 703 | super(CommandLineConfigLoader, self).__init__(log=log) |
|
704 | 704 | self.clear() |
|
705 | 705 | if argv is None: |
|
706 | 706 | argv = sys.argv[1:] |
|
707 | 707 | self.argv = argv |
|
708 | 708 | self.aliases = aliases or {} |
|
709 | 709 | self.flags = flags or {} |
|
710 | 710 | |
|
711 | 711 | self.parser_args = parser_args |
|
712 | 712 | self.version = parser_kw.pop("version", None) |
|
713 | 713 | kwargs = dict(argument_default=argparse.SUPPRESS) |
|
714 | 714 | kwargs.update(parser_kw) |
|
715 | 715 | self.parser_kw = kwargs |
|
716 | 716 | |
|
717 | 717 | def load_config(self, argv=None, aliases=None, flags=None): |
|
718 | 718 | """Parse command line arguments and return as a Config object. |
|
719 | 719 | |
|
720 | 720 | Parameters |
|
721 | 721 | ---------- |
|
722 | 722 | |
|
723 | 723 | argv : optional, list |
|
724 | 724 | If given, a list with the structure of sys.argv[1:] to parse |
|
725 | 725 | arguments from. If not given, the instance's self.argv attribute |
|
726 | 726 | (given at construction time) is used.""" |
|
727 | 727 | self.clear() |
|
728 | 728 | if argv is None: |
|
729 | 729 | argv = self.argv |
|
730 | 730 | if aliases is None: |
|
731 | 731 | aliases = self.aliases |
|
732 | 732 | if flags is None: |
|
733 | 733 | flags = self.flags |
|
734 | 734 | self._create_parser(aliases, flags) |
|
735 | 735 | self._parse_args(argv) |
|
736 | 736 | self._convert_to_config() |
|
737 | 737 | return self.config |
|
738 | 738 | |
|
739 | 739 | def get_extra_args(self): |
|
740 | 740 | if hasattr(self, 'extra_args'): |
|
741 | 741 | return self.extra_args |
|
742 | 742 | else: |
|
743 | 743 | return [] |
|
744 | 744 | |
|
745 | 745 | def _create_parser(self, aliases=None, flags=None): |
|
746 | 746 | self.parser = ArgumentParser(*self.parser_args, **self.parser_kw) |
|
747 | 747 | self._add_arguments(aliases, flags) |
|
748 | 748 | |
|
749 | 749 | def _add_arguments(self, aliases=None, flags=None): |
|
750 | 750 | raise NotImplementedError("subclasses must implement _add_arguments") |
|
751 | 751 | |
|
752 | 752 | def _parse_args(self, args): |
|
753 | 753 | """self.parser->self.parsed_data""" |
|
754 | 754 | # decode sys.argv to support unicode command-line options |
|
755 | 755 | enc = DEFAULT_ENCODING |
|
756 | 756 | uargs = [py3compat.cast_unicode(a, enc) for a in args] |
|
757 | 757 | self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs) |
|
758 | 758 | |
|
759 | 759 | def _convert_to_config(self): |
|
760 | 760 | """self.parsed_data->self.config""" |
|
761 | 761 | for k, v in iteritems(vars(self.parsed_data)): |
|
762 | 762 | exec("self.config.%s = v"%k, locals(), globals()) |
|
763 | 763 | |
|
764 | 764 | class KVArgParseConfigLoader(ArgParseConfigLoader): |
|
765 | 765 | """A config loader that loads aliases and flags with argparse, |
|
766 | 766 | but will use KVLoader for the rest. This allows better parsing |
|
767 | 767 | of common args, such as `ipython -c 'print 5'`, but still gets |
|
768 | 768 | arbitrary config with `ipython --InteractiveShell.use_readline=False`""" |
|
769 | 769 | |
|
770 | 770 | def _add_arguments(self, aliases=None, flags=None): |
|
771 | 771 | self.alias_flags = {} |
|
772 | 772 | # print aliases, flags |
|
773 | 773 | if aliases is None: |
|
774 | 774 | aliases = self.aliases |
|
775 | 775 | if flags is None: |
|
776 | 776 | flags = self.flags |
|
777 | 777 | paa = self.parser.add_argument |
|
778 | 778 | for key,value in iteritems(aliases): |
|
779 | 779 | if key in flags: |
|
780 | 780 | # flags |
|
781 | 781 | nargs = '?' |
|
782 | 782 | else: |
|
783 | 783 | nargs = None |
|
784 | 784 | if len(key) == 1: |
|
785 | 785 | paa('-'+key, '--'+key, type=unicode_type, dest=value, nargs=nargs) |
|
786 | 786 | else: |
|
787 | 787 | paa('--'+key, type=unicode_type, dest=value, nargs=nargs) |
|
788 | 788 | for key, (value, help) in iteritems(flags): |
|
789 | 789 | if key in self.aliases: |
|
790 | 790 | # this flag shares its name with an alias; remember its config so it can be applied when the alias is passed with no value |
|
791 | 791 | self.alias_flags[self.aliases[key]] = value |
|
792 | 792 | continue |
|
793 | 793 | if len(key) == 1: |
|
794 | 794 | paa('-'+key, '--'+key, action='append_const', dest='_flags', const=value) |
|
795 | 795 | else: |
|
796 | 796 | paa('--'+key, action='append_const', dest='_flags', const=value) |
|
797 | 797 | |
|
798 | 798 | def _convert_to_config(self): |
|
799 | 799 | """self.parsed_data->self.config, parse unrecognized extra args via KVLoader.""" |
|
800 | 800 | # remove subconfigs list from namespace before transforming the Namespace |
|
801 | 801 | if '_flags' in self.parsed_data: |
|
802 | 802 | subcs = self.parsed_data._flags |
|
803 | 803 | del self.parsed_data._flags |
|
804 | 804 | else: |
|
805 | 805 | subcs = [] |
|
806 | 806 | |
|
807 | 807 | for k, v in iteritems(vars(self.parsed_data)): |
|
808 | 808 | if v is None: |
|
809 | 809 | # it was a flag that shares the name of an alias |
|
810 | 810 | subcs.append(self.alias_flags[k]) |
|
811 | 811 | else: |
|
812 | 812 | # eval the KV assignment |
|
813 | 813 | self._exec_config_str(k, v) |
|
814 | 814 | |
|
815 | 815 | for subc in subcs: |
|
816 | 816 | self._load_flag(subc) |
|
817 | 817 | |
|
818 | 818 | if self.extra_args: |
|
819 | 819 | sub_parser = KeyValueConfigLoader(log=self.log) |
|
820 | 820 | sub_parser.load_config(self.extra_args) |
|
821 | 821 | self.config.merge(sub_parser.config) |
|
822 | 822 | self.extra_args = sub_parser.extra_args |
|
823 | 823 | |
|
824 | 824 | |
|
825 | 825 | def load_pyconfig_files(config_files, path): |
|
826 | 826 | """Load multiple Python config files, merging each of them in turn. |
|
827 | 827 | |
|
828 | 828 | Parameters |
|
829 | 829 | ========== |
|
830 | 830 | config_files : list of str |
|
831 | 831 | List of config files names to load and merge into the config. |
|
832 | 832 | path : unicode |
|
833 | 833 | The full path to the location of the config files. |
|
834 | 834 | """ |
|
835 | 835 | config = Config() |
|
836 | 836 | for cf in config_files: |
|
837 | 837 | loader = PyFileConfigLoader(cf, path=path) |
|
838 | 838 | try: |
|
839 | 839 | next_config = loader.load_config() |
|
840 | 840 | except ConfigFileNotFound: |
|
841 | 841 | pass |
|
842 | 842 | except: |
|
843 | 843 | raise |
|
844 | 844 | else: |
|
845 | 845 | config.merge(next_config) |
|
846 | 846 | return config |
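A hedged sketch of the helper above with a throwaway Python config file (file name and trait are illustrative):

# Each file is executed with get_config()/load_subconfig in scope,
# and the resulting Config objects are merged in order.
import os, tempfile
from IPython.config.loader import load_pyconfig_files

d = tempfile.mkdtemp()
with open(os.path.join(d, 'ipython_config.py'), 'w') as f:
    f.write("c = get_config()\nc.InteractiveShell.autocall = 1\n")

config = load_pyconfig_files(['ipython_config.py'], d)
print(config.InteractiveShell.autocall)   # 1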
@@ -1,390 +1,389 b''
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | An application for IPython. |
|
4 | 4 | |
|
5 | 5 | All top-level applications should use the classes in this module for |
|
6 | 6 | handling configuration and creating configurables. |
|
7 | 7 | |
|
8 | 8 | The job of an :class:`Application` is to create the master configuration |
|
9 | 9 | object and then create the configurable objects, passing the config to them. |
|
10 | 10 | |
|
11 | 11 | Authors: |
|
12 | 12 | |
|
13 | 13 | * Brian Granger |
|
14 | 14 | * Fernando Perez |
|
15 | 15 | * Min RK |
|
16 | 16 | |
|
17 | 17 | """ |
|
18 | 18 | |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | # Copyright (C) 2008 The IPython Development Team |
|
21 | 21 | # |
|
22 | 22 | # Distributed under the terms of the BSD License. The full license is in |
|
23 | 23 | # the file COPYING, distributed as part of this software. |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | |
|
26 | 26 | #----------------------------------------------------------------------------- |
|
27 | 27 | # Imports |
|
28 | 28 | #----------------------------------------------------------------------------- |
|
29 | 29 | |
|
30 | 30 | import atexit |
|
31 | import errno | |
|
32 | 31 | import glob |
|
33 | 32 | import logging |
|
34 | 33 | import os |
|
35 | 34 | import shutil |
|
36 | 35 | import sys |
|
37 | 36 | |
|
38 | 37 | from IPython.config.application import Application, catch_config_error |
|
39 | 38 | from IPython.config.loader import ConfigFileNotFound |
|
40 | 39 | from IPython.core import release, crashhandler |
|
41 | 40 | from IPython.core.profiledir import ProfileDir, ProfileDirError |
|
42 | 41 | from IPython.utils.path import get_ipython_dir, get_ipython_package_dir, ensure_dir_exists |
|
43 | 42 | from IPython.utils import py3compat |
|
44 | 43 | from IPython.utils.traitlets import List, Unicode, Type, Bool, Dict, Set, Instance |
|
45 | 44 | |
|
46 | 45 | #----------------------------------------------------------------------------- |
|
47 | 46 | # Classes and functions |
|
48 | 47 | #----------------------------------------------------------------------------- |
|
49 | 48 | |
|
50 | 49 | |
|
51 | 50 | #----------------------------------------------------------------------------- |
|
52 | 51 | # Base Application Class |
|
53 | 52 | #----------------------------------------------------------------------------- |
|
54 | 53 | |
|
55 | 54 | # aliases and flags |
|
56 | 55 | |
|
57 | 56 | base_aliases = { |
|
58 | 57 | 'profile-dir' : 'ProfileDir.location', |
|
59 | 58 | 'profile' : 'BaseIPythonApplication.profile', |
|
60 | 59 | 'ipython-dir' : 'BaseIPythonApplication.ipython_dir', |
|
61 | 60 | 'log-level' : 'Application.log_level', |
|
62 | 61 | 'config' : 'BaseIPythonApplication.extra_config_file', |
|
63 | 62 | } |
|
64 | 63 | |
|
65 | 64 | base_flags = dict( |
|
66 | 65 | debug = ({'Application' : {'log_level' : logging.DEBUG}}, |
|
67 | 66 | "set log level to logging.DEBUG (maximize logging output)"), |
|
68 | 67 | quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, |
|
69 | 68 | "set log level to logging.CRITICAL (minimize logging output)"), |
|
70 | 69 | init = ({'BaseIPythonApplication' : { |
|
71 | 70 | 'copy_config_files' : True, |
|
72 | 71 | 'auto_create' : True} |
|
73 | 72 | }, """Initialize profile with default config files. This is equivalent |
|
74 | 73 | to running `ipython profile create <profile>` prior to startup. |
|
75 | 74 | """) |
|
76 | 75 | ) |
|
77 | 76 | |
|
78 | 77 | |
|
79 | 78 | class BaseIPythonApplication(Application): |
|
80 | 79 | |
|
81 | 80 | name = Unicode(u'ipython') |
|
82 | 81 | description = Unicode(u'IPython: an enhanced interactive Python shell.') |
|
83 | 82 | version = Unicode(release.version) |
|
84 | 83 | |
|
85 | 84 | aliases = Dict(base_aliases) |
|
86 | 85 | flags = Dict(base_flags) |
|
87 | 86 | classes = List([ProfileDir]) |
|
88 | 87 | |
|
89 | 88 | # Track whether the config_file has changed, |
|
90 | 89 | # because some logic happens only if we aren't using the default. |
|
91 | 90 | config_file_specified = Set() |
|
92 | 91 | |
|
93 | 92 | config_file_name = Unicode() |
|
94 | 93 | def _config_file_name_default(self): |
|
95 | 94 | return self.name.replace('-','_') + u'_config.py' |
|
96 | 95 | def _config_file_name_changed(self, name, old, new): |
|
97 | 96 | if new != old: |
|
98 | 97 | self.config_file_specified.add(new) |
|
99 | 98 | |
|
100 | 99 | # The directory that contains IPython's builtin profiles. |
|
101 | 100 | builtin_profile_dir = Unicode( |
|
102 | 101 | os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') |
|
103 | 102 | ) |
|
104 | 103 | |
|
105 | 104 | config_file_paths = List(Unicode) |
|
106 | 105 | def _config_file_paths_default(self): |
|
107 | 106 | return [py3compat.getcwd()] |
|
108 | 107 | |
|
109 | 108 | extra_config_file = Unicode(config=True, |
|
110 | 109 | help="""Path to an extra config file to load. |
|
111 | 110 | |
|
112 | 111 | If specified, load this config file in addition to any other IPython config. |
|
113 | 112 | """) |
|
114 | 113 | def _extra_config_file_changed(self, name, old, new): |
|
115 | 114 | try: |
|
116 | 115 | self.config_files.remove(old) |
|
117 | 116 | except ValueError: |
|
118 | 117 | pass |
|
119 | 118 | self.config_file_specified.add(new) |
|
120 | 119 | self.config_files.append(new) |
|
121 | 120 | |
|
122 | 121 | profile = Unicode(u'default', config=True, |
|
123 | 122 | help="""The IPython profile to use.""" |
|
124 | 123 | ) |
|
125 | 124 | |
|
126 | 125 | def _profile_changed(self, name, old, new): |
|
127 | 126 | self.builtin_profile_dir = os.path.join( |
|
128 | 127 | get_ipython_package_dir(), u'config', u'profile', new |
|
129 | 128 | ) |
|
130 | 129 | |
|
131 | 130 | ipython_dir = Unicode(config=True, |
|
132 | 131 | help=""" |
|
133 | 132 | The name of the IPython directory. This directory is used for logging |
|
134 | 133 | configuration (through profiles), history storage, etc. The default |
|
135 | 134 | is usually $HOME/.ipython. This options can also be specified through |
|
136 | 135 | the environment variable IPYTHONDIR. |
|
137 | 136 | """ |
|
138 | 137 | ) |
|
139 | 138 | def _ipython_dir_default(self): |
|
140 | 139 | d = get_ipython_dir() |
|
141 | 140 | self._ipython_dir_changed('ipython_dir', d, d) |
|
142 | 141 | return d |
|
143 | 142 | |
|
144 | 143 | _in_init_profile_dir = False |
|
145 | 144 | profile_dir = Instance(ProfileDir) |
|
146 | 145 | def _profile_dir_default(self): |
|
147 | 146 | # avoid recursion |
|
148 | 147 | if self._in_init_profile_dir: |
|
149 | 148 | return |
|
150 | 149 | # profile_dir requested early, force initialization |
|
151 | 150 | self.init_profile_dir() |
|
152 | 151 | return self.profile_dir |
|
153 | 152 | |
|
154 | 153 | overwrite = Bool(False, config=True, |
|
155 | 154 | help="""Whether to overwrite existing config files when copying""") |
|
156 | 155 | auto_create = Bool(False, config=True, |
|
157 | 156 | help="""Whether to create profile dir if it doesn't exist""") |
|
158 | 157 | |
|
159 | 158 | config_files = List(Unicode) |
|
160 | 159 | def _config_files_default(self): |
|
161 | 160 | return [self.config_file_name] |
|
162 | 161 | |
|
163 | 162 | copy_config_files = Bool(False, config=True, |
|
164 | 163 | help="""Whether to install the default config files into the profile dir. |
|
165 | 164 | If a new profile is being created, and IPython contains config files for that |
|
166 | 165 | profile, then they will be staged into the new directory. Otherwise, |
|
167 | 166 | default config files will be automatically generated. |
|
168 | 167 | """) |
|
169 | 168 | |
|
170 | 169 | verbose_crash = Bool(False, config=True, |
|
171 | 170 | help="""Create a massive crash report when IPython encounters what may be an |
|
172 | 171 | internal error. The default is to append a short message to the |
|
173 | 172 | usual traceback""") |
|
174 | 173 | |
|
175 | 174 | # The class to use as the crash handler. |
|
176 | 175 | crash_handler_class = Type(crashhandler.CrashHandler) |
|
177 | 176 | |
|
178 | 177 | @catch_config_error |
|
179 | 178 | def __init__(self, **kwargs): |
|
180 | 179 | super(BaseIPythonApplication, self).__init__(**kwargs) |
|
181 | 180 | # ensure current working directory exists |
|
182 | 181 | try: |
|
183 | 182 | directory = py3compat.getcwd() |
|
184 | 183 | except: |
|
185 | 184 | # raise exception |
|
186 | 185 | self.log.error("Current working directory doesn't exist.") |
|
187 | 186 | raise |
|
188 | 187 | |
|
189 | 188 | #------------------------------------------------------------------------- |
|
190 | 189 | # Various stages of Application creation |
|
191 | 190 | #------------------------------------------------------------------------- |
|
192 | 191 | |
|
193 | 192 | def init_crash_handler(self): |
|
194 | 193 | """Create a crash handler, typically setting sys.excepthook to it.""" |
|
195 | 194 | self.crash_handler = self.crash_handler_class(self) |
|
196 | 195 | sys.excepthook = self.excepthook |
|
197 | 196 | def unset_crashhandler(): |
|
198 | 197 | sys.excepthook = sys.__excepthook__ |
|
199 | 198 | atexit.register(unset_crashhandler) |
|
200 | 199 | |
|
201 | 200 | def excepthook(self, etype, evalue, tb): |
|
202 | 201 | """this is sys.excepthook after init_crashhandler |
|
203 | 202 | |
|
204 | 203 | set self.verbose_crash=True to use our full crashhandler, instead of |
|
205 | 204 | a regular traceback with a short message (crash_handler_lite) |
|
206 | 205 | """ |
|
207 | 206 | |
|
208 | 207 | if self.verbose_crash: |
|
209 | 208 | return self.crash_handler(etype, evalue, tb) |
|
210 | 209 | else: |
|
211 | 210 | return crashhandler.crash_handler_lite(etype, evalue, tb) |
|
212 | 211 | |
|
213 | 212 | def _ipython_dir_changed(self, name, old, new): |
|
214 | 213 | str_old = py3compat.cast_bytes_py2(os.path.abspath(old), |
|
215 | 214 | sys.getfilesystemencoding() |
|
216 | 215 | ) |
|
217 | 216 | if str_old in sys.path: |
|
218 | 217 | sys.path.remove(str_old) |
|
219 | 218 | str_path = py3compat.cast_bytes_py2(os.path.abspath(new), |
|
220 | 219 | sys.getfilesystemencoding() |
|
221 | 220 | ) |
|
222 | 221 | sys.path.append(str_path) |
|
223 | 222 | ensure_dir_exists(new) |
|
224 | 223 | readme = os.path.join(new, 'README') |
|
225 | 224 | readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') |
|
226 | 225 | if not os.path.exists(readme) and os.path.exists(readme_src): |
|
227 | 226 | shutil.copy(readme_src, readme) |
|
228 | 227 | for d in ('extensions', 'nbextensions'): |
|
229 | 228 | path = os.path.join(new, d) |
|
230 | 229 | try: |
|
231 | 230 | ensure_dir_exists(path) |
|
232 | 231 | except OSError: |
|
233 | 232 | # this will not be EEXIST |
|
234 | 233 | self.log.error("couldn't create path %s: %s", path, e) |
|
235 | 234 | self.log.debug("IPYTHONDIR set to: %s" % new) |
|
236 | 235 | |
|
237 | 236 | def load_config_file(self, suppress_errors=True): |
|
238 | 237 | """Load the config file. |
|
239 | 238 | |
|
240 | 239 | By default, errors in loading config are handled, and a warning |
|
241 | 240 | printed on screen. For testing, the suppress_errors option is set |
|
242 | 241 | to False, so errors will make tests fail. |
|
243 | 242 | """ |
|
244 | 243 | self.log.debug("Searching path %s for config files", self.config_file_paths) |
|
245 | 244 | base_config = 'ipython_config.py' |
|
246 | 245 | self.log.debug("Attempting to load config file: %s" % |
|
247 | 246 | base_config) |
|
248 | 247 | try: |
|
249 | 248 | Application.load_config_file( |
|
250 | 249 | self, |
|
251 | 250 | base_config, |
|
252 | 251 | path=self.config_file_paths |
|
253 | 252 | ) |
|
254 | 253 | except ConfigFileNotFound: |
|
255 | 254 | # ignore errors loading parent |
|
256 | 255 | self.log.debug("Config file %s not found", base_config) |
|
257 | 256 | pass |
|
258 | 257 | |
|
259 | 258 | for config_file_name in self.config_files: |
|
260 | 259 | if not config_file_name or config_file_name == base_config: |
|
261 | 260 | continue |
|
262 | 261 | self.log.debug("Attempting to load config file: %s" % |
|
263 | 262 | self.config_file_name) |
|
264 | 263 | try: |
|
265 | 264 | Application.load_config_file( |
|
266 | 265 | self, |
|
267 | 266 | config_file_name, |
|
268 | 267 | path=self.config_file_paths |
|
269 | 268 | ) |
|
270 | 269 | except ConfigFileNotFound: |
|
271 | 270 | # Only warn if the default config file was NOT being used. |
|
272 | 271 | if config_file_name in self.config_file_specified: |
|
273 | 272 | msg = self.log.warn |
|
274 | 273 | else: |
|
275 | 274 | msg = self.log.debug |
|
276 | 275 | msg("Config file not found, skipping: %s", config_file_name) |
|
277 | 276 | except: |
|
278 | 277 | # For testing purposes. |
|
279 | 278 | if not suppress_errors: |
|
280 | 279 | raise |
|
281 | 280 | self.log.warn("Error loading config file: %s" % |
|
282 | 281 | self.config_file_name, exc_info=True) |
|
283 | 282 | |
|
284 | 283 | def init_profile_dir(self): |
|
285 | 284 | """initialize the profile dir""" |
|
286 | 285 | self._in_init_profile_dir = True |
|
287 | 286 | if self.profile_dir is not None: |
|
288 | 287 | # already ran |
|
289 | 288 | return |
|
290 | 289 | if 'ProfileDir.location' not in self.config: |
|
291 | 290 | # location not specified, find by profile name |
|
292 | 291 | try: |
|
293 | 292 | p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) |
|
294 | 293 | except ProfileDirError: |
|
295 | 294 | # not found, maybe create it (always create default profile) |
|
296 | 295 | if self.auto_create or self.profile == 'default': |
|
297 | 296 | try: |
|
298 | 297 | p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) |
|
299 | 298 | except ProfileDirError: |
|
300 | 299 | self.log.fatal("Could not create profile: %r"%self.profile) |
|
301 | 300 | self.exit(1) |
|
302 | 301 | else: |
|
303 | 302 | self.log.info("Created profile dir: %r"%p.location) |
|
304 | 303 | else: |
|
305 | 304 | self.log.fatal("Profile %r not found."%self.profile) |
|
306 | 305 | self.exit(1) |
|
307 | 306 | else: |
|
308 | 307 | self.log.info("Using existing profile dir: %r"%p.location) |
|
309 | 308 | else: |
|
310 | 309 | location = self.config.ProfileDir.location |
|
311 | 310 | # location is fully specified |
|
312 | 311 | try: |
|
313 | 312 | p = ProfileDir.find_profile_dir(location, self.config) |
|
314 | 313 | except ProfileDirError: |
|
315 | 314 | # not found, maybe create it |
|
316 | 315 | if self.auto_create: |
|
317 | 316 | try: |
|
318 | 317 | p = ProfileDir.create_profile_dir(location, self.config) |
|
319 | 318 | except ProfileDirError: |
|
320 | 319 | self.log.fatal("Could not create profile directory: %r"%location) |
|
321 | 320 | self.exit(1) |
|
322 | 321 | else: |
|
323 | 322 | self.log.info("Creating new profile dir: %r"%location) |
|
324 | 323 | else: |
|
325 | 324 | self.log.fatal("Profile directory %r not found."%location) |
|
326 | 325 | self.exit(1) |
|
327 | 326 | else: |
|
328 | 327 | self.log.info("Using existing profile dir: %r"%location) |
|
329 | 328 | # if profile_dir is specified explicitly, set profile name |
|
330 | 329 | dir_name = os.path.basename(p.location) |
|
331 | 330 | if dir_name.startswith('profile_'): |
|
332 | 331 | self.profile = dir_name[8:] |
|
333 | 332 | |
|
334 | 333 | self.profile_dir = p |
|
335 | 334 | self.config_file_paths.append(p.location) |
|
336 | 335 | self._in_init_profile_dir = False |
|
337 | 336 | |
|
338 | 337 | def init_config_files(self): |
|
339 | 338 | """[optionally] copy default config files into profile dir.""" |
|
340 | 339 | # copy config files |
|
341 | 340 | path = self.builtin_profile_dir |
|
342 | 341 | if self.copy_config_files: |
|
343 | 342 | src = self.profile |
|
344 | 343 | |
|
345 | 344 | cfg = self.config_file_name |
|
346 | 345 | if path and os.path.exists(os.path.join(path, cfg)): |
|
347 | 346 | self.log.warn("Staging %r from %s into %r [overwrite=%s]"%( |
|
348 | 347 | cfg, src, self.profile_dir.location, self.overwrite) |
|
349 | 348 | ) |
|
350 | 349 | self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) |
|
351 | 350 | else: |
|
352 | 351 | self.stage_default_config_file() |
|
353 | 352 | else: |
|
354 | 353 | # Still stage *bundled* config files, but not generated ones |
|
355 | 354 | # This is necessary for `ipython profile=sympy` to load the profile |
|
356 | 355 | # on the first go |
|
357 | 356 | files = glob.glob(os.path.join(path, '*.py')) |
|
358 | 357 | for fullpath in files: |
|
359 | 358 | cfg = os.path.basename(fullpath) |
|
360 | 359 | if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): |
|
361 | 360 | # file was copied |
|
362 | 361 | self.log.warn("Staging bundled %s from %s into %r"%( |
|
363 | 362 | cfg, self.profile, self.profile_dir.location) |
|
364 | 363 | ) |
|
365 | 364 | |
|
366 | 365 | |
|
367 | 366 | def stage_default_config_file(self): |
|
368 | 367 | """auto generate default config file, and stage it into the profile.""" |
|
369 | 368 | s = self.generate_config_file() |
|
370 | 369 | fname = os.path.join(self.profile_dir.location, self.config_file_name) |
|
371 | 370 | if self.overwrite or not os.path.exists(fname): |
|
372 | 371 | self.log.warn("Generating default config file: %r"%(fname)) |
|
373 | 372 | with open(fname, 'w') as f: |
|
374 | 373 | f.write(s) |
|
375 | 374 | |
|
376 | 375 | @catch_config_error |
|
377 | 376 | def initialize(self, argv=None): |
|
378 | 377 | # don't hook up crash handler before parsing command-line |
|
379 | 378 | self.parse_command_line(argv) |
|
380 | 379 | self.init_crash_handler() |
|
381 | 380 | if self.subapp is not None: |
|
382 | 381 | # stop here if subapp is taking over |
|
383 | 382 | return |
|
384 | 383 | cl_config = self.config |
|
385 | 384 | self.init_profile_dir() |
|
386 | 385 | self.init_config_files() |
|
387 | 386 | self.load_config_file() |
|
388 | 387 | # enforce cl-opts override configfile opts: |
|
389 | 388 | self.update_config(cl_config) |
|
390 | 389 |
@@ -1,997 +1,996 b''
|
1 | 1 | """Word completion for IPython. |
|
2 | 2 | |
|
3 | 3 | This module is a fork of the rlcompleter module in the Python standard |
|
4 | 4 | library. The original enhancements made to rlcompleter have been sent |
|
5 | 5 | upstream and were accepted as of Python 2.3, but we need a lot more |
|
6 | 6 | functionality specific to IPython, so this module will continue to live as an |
|
7 | 7 | IPython-specific utility. |
|
8 | 8 | |
|
9 | 9 | Original rlcompleter documentation: |
|
10 | 10 | |
|
11 | 11 | This requires the latest extension to the readline module (the |
|
12 | 12 | completer) completes keywords, built-ins and globals in __main__; when completing |
|
13 | 13 | NAME.NAME..., it evaluates (!) the expression up to the last dot and |
|
14 | 14 | completes its attributes. |
|
15 | 15 | |
|
16 | 16 | It's very cool to do "import string" type "string.", hit the |
|
17 | 17 | completion key (twice), and see the list of names defined by the |
|
18 | 18 | string module! |
|
19 | 19 | |
|
20 | 20 | Tip: to use the tab key as the completion key, call |
|
21 | 21 | |
|
22 | 22 | readline.parse_and_bind("tab: complete") |
|
23 | 23 | |
|
24 | 24 | Notes: |
|
25 | 25 | |
|
26 | 26 | - Exceptions raised by the completer function are *ignored* (and |
|
27 | 27 | generally cause the completion to fail). This is a feature -- since |
|
28 | 28 | readline sets the tty device in raw (or cbreak) mode, printing a |
|
29 | 29 | traceback wouldn't work well without some complicated hoopla to save, |
|
30 | 30 | reset and restore the tty state. |
|
31 | 31 | |
|
32 | 32 | - The evaluation of the NAME.NAME... form may cause arbitrary |
|
33 | 33 | application defined code to be executed if an object with a |
|
34 | 34 | ``__getattr__`` hook is found. Since it is the responsibility of the |
|
35 | 35 | application (or the user) to enable this feature, I consider this an |
|
36 | 36 | acceptable risk. More complicated expressions (e.g. function calls or |
|
37 | 37 | indexing operations) are *not* evaluated. |
|
38 | 38 | |
|
39 | 39 | - GNU readline is also used by the built-in functions input() and |
|
40 | 40 | raw_input(), and thus these also benefit/suffer from the completer |
|
41 | 41 | features. Clearly an interactive application can benefit by |
|
42 | 42 | specifying its own completer function and using raw_input() for all |
|
43 | 43 | its input. |
|
44 | 44 | |
|
45 | 45 | - When the original stdin is not a tty device, GNU readline is never |
|
46 | 46 | used, and this module (and the readline module) are silently inactive. |
|
47 | 47 | """ |
|
48 | 48 | |
|
49 | 49 | #***************************************************************************** |
|
50 | 50 | # |
|
51 | 51 | # Since this file is essentially a minimally modified copy of the rlcompleter |
|
52 | 52 | # module which is part of the standard Python distribution, I assume that the |
|
53 | 53 | # proper procedure is to maintain its copyright as belonging to the Python |
|
54 | 54 | # Software Foundation (in addition to my own, for all new code). |
|
55 | 55 | # |
|
56 | 56 | # Copyright (C) 2008 IPython Development Team |
|
57 | 57 | # Copyright (C) 2001 Fernando Perez. <fperez@colorado.edu> |
|
58 | 58 | # Copyright (C) 2001 Python Software Foundation, www.python.org |
|
59 | 59 | # |
|
60 | 60 | # Distributed under the terms of the BSD License. The full license is in |
|
61 | 61 | # the file COPYING, distributed as part of this software. |
|
62 | 62 | # |
|
63 | 63 | #***************************************************************************** |
|
64 | 64 | |
|
65 | 65 | #----------------------------------------------------------------------------- |
|
66 | 66 | # Imports |
|
67 | 67 | #----------------------------------------------------------------------------- |
|
68 | 68 | |
|
69 | 69 | import __main__ |
|
70 | 70 | import glob |
|
71 | 71 | import inspect |
|
72 | 72 | import itertools |
|
73 | 73 | import keyword |
|
74 | 74 | import os |
|
75 | 75 | import re |
|
76 | 76 | import sys |
|
77 | 77 | |
|
78 | 78 | from IPython.config.configurable import Configurable |
|
79 | 79 | from IPython.core.error import TryNext |
|
80 | 80 | from IPython.core.inputsplitter import ESC_MAGIC |
|
81 | 81 | from IPython.utils import generics |
|
82 | from IPython.utils import io | |
|
83 | 82 | from IPython.utils.dir2 import dir2 |
|
84 | 83 | from IPython.utils.process import arg_split |
|
85 | 84 | from IPython.utils.py3compat import builtin_mod, string_types |
|
86 | 85 | from IPython.utils.traitlets import CBool, Enum |
|
87 | 86 | |
|
88 | 87 | #----------------------------------------------------------------------------- |
|
89 | 88 | # Globals |
|
90 | 89 | #----------------------------------------------------------------------------- |
|
91 | 90 | |
|
92 | 91 | # Public API |
|
93 | 92 | __all__ = ['Completer','IPCompleter'] |
|
94 | 93 | |
|
95 | 94 | if sys.platform == 'win32': |
|
96 | 95 | PROTECTABLES = ' ' |
|
97 | 96 | else: |
|
98 | 97 | PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' |
|
99 | 98 | |
|
100 | 99 | #----------------------------------------------------------------------------- |
|
101 | 100 | # Main functions and classes |
|
102 | 101 | #----------------------------------------------------------------------------- |
|
103 | 102 | |
|
104 | 103 | def has_open_quotes(s): |
|
105 | 104 | """Return whether a string has open quotes. |
|
106 | 105 | |
|
107 | 106 | This simply checks whether the number of quote characters of either type in
|
108 | 107 | the string is odd. |
|
109 | 108 | |
|
110 | 109 | Returns |
|
111 | 110 | ------- |
|
112 | 111 | If there is an open quote, the quote character is returned. Else, return |
|
113 | 112 | False. |
|
114 | 113 | """ |
|
115 | 114 | # We check " first, then ', so complex cases with nested quotes will get |
|
116 | 115 | # the " to take precedence. |
|
117 | 116 | if s.count('"') % 2: |
|
118 | 117 | return '"' |
|
119 | 118 | elif s.count("'") % 2: |
|
120 | 119 | return "'" |
|
121 | 120 | else: |
|
122 | 121 | return False |
|
123 | 122 | |
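For instance, has_open_quotes reports the dangling quote character, if any, of a partial line:

    has_open_quotes('print("hello')   # -> '"'   (one unmatched double quote)
    has_open_quotes("x = 'abc'")      # -> False (all quotes are balanced)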
|
124 | 123 | |
|
125 | 124 | def protect_filename(s): |
|
126 | 125 | """Escape a string to protect certain characters.""" |
|
127 | 126 | |
|
128 | 127 | return "".join([(ch in PROTECTABLES and '\\' + ch or ch) |
|
129 | 128 | for ch in s]) |
|
130 | 129 | |
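As a quick sketch on a non-Windows platform (where the larger PROTECTABLES set applies), spaces and shell-special characters get backslash-escaped:

    protect_filename('my file (1).txt')   # returns  my\ file\ \(1\).txt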
|
131 | 130 | def expand_user(path): |
|
132 | 131 | """Expand '~'-style usernames in strings. |
|
133 | 132 | |
|
134 | 133 | This is similar to :func:`os.path.expanduser`, but it computes and returns |
|
135 | 134 | extra information that will be useful if the input was being used in |
|
136 | 135 | computing completions, and you wish to return the completions with the |
|
137 | 136 | original '~' instead of its expanded value. |
|
138 | 137 | |
|
139 | 138 | Parameters |
|
140 | 139 | ---------- |
|
141 | 140 | path : str |
|
142 | 141 | String to be expanded. If no ~ is present, the output is the same as the |
|
143 | 142 | input. |
|
144 | 143 | |
|
145 | 144 | Returns |
|
146 | 145 | ------- |
|
147 | 146 | newpath : str |
|
148 | 147 | Result of ~ expansion in the input path. |
|
149 | 148 | tilde_expand : bool |
|
150 | 149 | Whether any expansion was performed or not. |
|
151 | 150 | tilde_val : str |
|
152 | 151 | The value that ~ was replaced with. |
|
153 | 152 | """ |
|
154 | 153 | # Default values |
|
155 | 154 | tilde_expand = False |
|
156 | 155 | tilde_val = '' |
|
157 | 156 | newpath = path |
|
158 | 157 | |
|
159 | 158 | if path.startswith('~'): |
|
160 | 159 | tilde_expand = True |
|
161 | 160 | rest = len(path)-1 |
|
162 | 161 | newpath = os.path.expanduser(path) |
|
163 | 162 | if rest: |
|
164 | 163 | tilde_val = newpath[:-rest] |
|
165 | 164 | else: |
|
166 | 165 | tilde_val = newpath |
|
167 | 166 | |
|
168 | 167 | return newpath, tilde_expand, tilde_val |
|
169 | 168 | |
|
170 | 169 | |
|
171 | 170 | def compress_user(path, tilde_expand, tilde_val): |
|
172 | 171 | """Does the opposite of expand_user, with its outputs. |
|
173 | 172 | """ |
|
174 | 173 | if tilde_expand: |
|
175 | 174 | return path.replace(tilde_val, '~') |
|
176 | 175 | else: |
|
177 | 176 | return path |
|
178 | 177 | |
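A hedged round-trip example, assuming the current user's home directory is /home/user:

    newpath, expanded, tilde_val = expand_user('~/notebooks')
    # newpath == '/home/user/notebooks', expanded == True, tilde_val == '/home/user'
    compress_user(newpath, expanded, tilde_val)   # -> '~/notebooks'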
|
179 | 178 | |
|
180 | 179 | |
|
181 | 180 | def penalize_magics_key(word): |
|
182 | 181 | """key for sorting that penalizes magic commands in the ordering |
|
183 | 182 | |
|
184 | 183 | Normal words are left alone. |
|
185 | 184 | |
|
186 | 185 | Magic commands have the initial % moved to the end, e.g. |
|
187 | 186 | %matplotlib is transformed as follows: |
|
188 | 187 | |
|
189 | 188 | %matplotlib -> matplotlib% |
|
190 | 189 | |
|
191 | 190 | [The choice of the final % is arbitrary.] |
|
192 | 191 | |
|
193 | 192 | Since "matplotlib" < "matplotlib%" as strings, |
|
194 | 193 | "timeit" will appear before the magic "%timeit" in the ordering |
|
195 | 194 | |
|
196 | 195 | For consistency, move "%%" to the end, so cell magics appear *after* |
|
197 | 196 | line magics with the same name. |
|
198 | 197 | |
|
199 | 198 | A check is performed that there are no other "%" in the string; |
|
200 | 199 | if there are, then the string is not a magic command and is left unchanged. |
|
201 | 200 | |
|
202 | 201 | """ |
|
203 | 202 | |
|
204 | 203 | # Move any % signs from start to end of the key |
|
205 | 204 | # provided there are no others elsewhere in the string |
|
206 | 205 | |
|
207 | 206 | if word[:2] == "%%": |
|
208 | 207 | if not "%" in word[2:]: |
|
209 | 208 | return word[2:] + "%%" |
|
210 | 209 | |
|
211 | 210 | if word[:1] == "%": |
|
212 | 211 | if not "%" in word[1:]: |
|
213 | 212 | return word[1:] + "%" |
|
214 | 213 | |
|
215 | 214 | return word |
|
216 | 215 | |
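Used as a sort key, this puts a plain name before the line magic of the same name, and the line magic before the cell magic:

    sorted(['%%timeit', '%timeit', 'timeit'], key=penalize_magics_key)
    # -> ['timeit', '%timeit', '%%timeit']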
|
217 | 216 | |
|
218 | 217 | |
|
219 | 218 | class Bunch(object): pass |
|
220 | 219 | |
|
221 | 220 | |
|
222 | 221 | DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?' |
|
223 | 222 | GREEDY_DELIMS = ' =\r\n' |
|
224 | 223 | |
|
225 | 224 | |
|
226 | 225 | class CompletionSplitter(object): |
|
227 | 226 | """An object to split an input line in a manner similar to readline. |
|
228 | 227 | |
|
229 | 228 | By having our own implementation, we can expose readline-like completion in |
|
230 | 229 | a uniform manner to all frontends. This object only needs to be given the |
|
231 | 230 | line of text to be split and the cursor position on said line, and it |
|
232 | 231 | returns the 'word' to be completed on at the cursor after splitting the |
|
233 | 232 | entire line. |
|
234 | 233 | |
|
235 | 234 | What characters are used as splitting delimiters can be controlled by |
|
236 | 235 | setting the `delims` attribute (this is a property that internally |
|
237 | 236 | automatically builds the necessary regular expression)""" |
|
238 | 237 | |
|
239 | 238 | # Private interface |
|
240 | 239 | |
|
241 | 240 | # A string of delimiter characters. The default value makes sense for |
|
242 | 241 | # IPython's most typical usage patterns. |
|
243 | 242 | _delims = DELIMS |
|
244 | 243 | |
|
245 | 244 | # The expression (a normal string) to be compiled into a regular expression |
|
246 | 245 | # for actual splitting. We store it as an attribute mostly for ease of |
|
247 | 246 | # debugging, since this type of code can be so tricky to debug. |
|
248 | 247 | _delim_expr = None |
|
249 | 248 | |
|
250 | 249 | # The regular expression that does the actual splitting |
|
251 | 250 | _delim_re = None |
|
252 | 251 | |
|
253 | 252 | def __init__(self, delims=None): |
|
254 | 253 | delims = CompletionSplitter._delims if delims is None else delims |
|
255 | 254 | self.delims = delims |
|
256 | 255 | |
|
257 | 256 | @property |
|
258 | 257 | def delims(self): |
|
259 | 258 | """Return the string of delimiter characters.""" |
|
260 | 259 | return self._delims |
|
261 | 260 | |
|
262 | 261 | @delims.setter |
|
263 | 262 | def delims(self, delims): |
|
264 | 263 | """Set the delimiters for line splitting.""" |
|
265 | 264 | expr = '[' + ''.join('\\'+ c for c in delims) + ']' |
|
266 | 265 | self._delim_re = re.compile(expr) |
|
267 | 266 | self._delims = delims |
|
268 | 267 | self._delim_expr = expr |
|
269 | 268 | |
|
270 | 269 | def split_line(self, line, cursor_pos=None): |
|
271 | 270 | """Split a line of text with a cursor at the given position. |
|
272 | 271 | """ |
|
273 | 272 | l = line if cursor_pos is None else line[:cursor_pos] |
|
274 | 273 | return self._delim_re.split(l)[-1] |
|
275 | 274 | |
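A small usage sketch with the default delimiters; note that '.' is deliberately not a delimiter, so dotted names survive the split:

    sp = CompletionSplitter()
    sp.split_line('print os.pa')      # -> 'os.pa'  (cursor assumed at end of line)
    sp.split_line('print os.pa', 5)   # -> 'print'  (cursor right after the 't')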
|
276 | 275 | |
|
277 | 276 | class Completer(Configurable): |
|
278 | 277 | |
|
279 | 278 | greedy = CBool(False, config=True, |
|
280 | 279 | help="""Activate greedy completion |
|
281 | 280 | |
|
282 | 281 | This will enable completion on elements of lists, results of function calls, etc., |
|
283 | 282 | but can be unsafe because the code is actually evaluated on TAB. |
|
284 | 283 | """ |
|
285 | 284 | ) |
|
286 | 285 | |
|
287 | 286 | |
|
288 | 287 | def __init__(self, namespace=None, global_namespace=None, **kwargs): |
|
289 | 288 | """Create a new completer for the command line. |
|
290 | 289 | |
|
291 | 290 | Completer(namespace=ns,global_namespace=ns2) -> completer instance. |
|
292 | 291 | |
|
293 | 292 | If unspecified, the default namespace where completions are performed |
|
294 | 293 | is __main__ (technically, __main__.__dict__). Namespaces should be |
|
295 | 294 | given as dictionaries. |
|
296 | 295 | |
|
297 | 296 | An optional second namespace can be given. This allows the completer |
|
298 | 297 | to handle cases where both the local and global scopes need to be |
|
299 | 298 | distinguished. |
|
300 | 299 | |
|
301 | 300 | Completer instances should be used as the completion mechanism of |
|
302 | 301 | readline via the set_completer() call: |
|
303 | 302 | |
|
304 | 303 | readline.set_completer(Completer(my_namespace).complete) |
|
305 | 304 | """ |
|
306 | 305 | |
|
307 | 306 | # Don't bind to namespace quite yet, but flag whether the user wants a |
|
308 | 307 | # specific namespace or to use __main__.__dict__. This will allow us |
|
309 | 308 | # to bind to __main__.__dict__ at completion time, not now. |
|
310 | 309 | if namespace is None: |
|
311 | 310 | self.use_main_ns = 1 |
|
312 | 311 | else: |
|
313 | 312 | self.use_main_ns = 0 |
|
314 | 313 | self.namespace = namespace |
|
315 | 314 | |
|
316 | 315 | # The global namespace, if given, can be bound directly |
|
317 | 316 | if global_namespace is None: |
|
318 | 317 | self.global_namespace = {} |
|
319 | 318 | else: |
|
320 | 319 | self.global_namespace = global_namespace |
|
321 | 320 | |
|
322 | 321 | super(Completer, self).__init__(**kwargs) |
|
323 | 322 | |
|
324 | 323 | def complete(self, text, state): |
|
325 | 324 | """Return the next possible completion for 'text'. |
|
326 | 325 | |
|
327 | 326 | This is called successively with state == 0, 1, 2, ... until it |
|
328 | 327 | returns None. The completion should begin with 'text'. |
|
329 | 328 | |
|
330 | 329 | """ |
|
331 | 330 | if self.use_main_ns: |
|
332 | 331 | self.namespace = __main__.__dict__ |
|
333 | 332 | |
|
334 | 333 | if state == 0: |
|
335 | 334 | if "." in text: |
|
336 | 335 | self.matches = self.attr_matches(text) |
|
337 | 336 | else: |
|
338 | 337 | self.matches = self.global_matches(text) |
|
339 | 338 | try: |
|
340 | 339 | return self.matches[state] |
|
341 | 340 | except IndexError: |
|
342 | 341 | return None |
|
343 | 342 | |
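A minimal sketch of that readline-style state protocol (the relative order of the two matches may vary, since the namespaces are plain dicts scanned after keywords and builtins):

    c = Completer(namespace={'alpha': 1, 'alphabet': 2})
    c.complete('alph', 0)   # -> 'alpha'
    c.complete('alph', 1)   # -> 'alphabet'
    c.complete('alph', 2)   # -> None (no more matches)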
|
344 | 343 | def global_matches(self, text): |
|
345 | 344 | """Compute matches when text is a simple name. |
|
346 | 345 | |
|
347 | 346 | Return a list of all keywords, built-in functions and names currently |
|
348 | 347 | defined in self.namespace or self.global_namespace that match. |
|
349 | 348 | |
|
350 | 349 | """ |
|
351 | 350 | #print 'Completer->global_matches, txt=%r' % text # dbg |
|
352 | 351 | matches = [] |
|
353 | 352 | match_append = matches.append |
|
354 | 353 | n = len(text) |
|
355 | 354 | for lst in [keyword.kwlist, |
|
356 | 355 | builtin_mod.__dict__.keys(), |
|
357 | 356 | self.namespace.keys(), |
|
358 | 357 | self.global_namespace.keys()]: |
|
359 | 358 | for word in lst: |
|
360 | 359 | if word[:n] == text and word != "__builtins__": |
|
361 | 360 | match_append(word) |
|
362 | 361 | return matches |
|
363 | 362 | |
|
364 | 363 | def attr_matches(self, text): |
|
365 | 364 | """Compute matches when text contains a dot. |
|
366 | 365 | |
|
367 | 366 | Assuming the text is of the form NAME.NAME....[NAME], and is |
|
368 | 367 | evaluatable in self.namespace or self.global_namespace, it will be |
|
369 | 368 | evaluated and its attributes (as revealed by dir()) are used as |
|
370 | 369 | possible completions. (For class instances, class members are
|
371 | 370 | also considered.) |
|
372 | 371 | |
|
373 | 372 | WARNING: this can still invoke arbitrary C code, if an object |
|
374 | 373 | with a __getattr__ hook is evaluated. |
|
375 | 374 | |
|
376 | 375 | """ |
|
377 | 376 | |
|
378 | 377 | #io.rprint('Completer->attr_matches, txt=%r' % text) # dbg |
|
379 | 378 | # Another option, seems to work great. Catches things like ''.<tab> |
|
380 | 379 | m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) |
|
381 | 380 | |
|
382 | 381 | if m: |
|
383 | 382 | expr, attr = m.group(1, 3) |
|
384 | 383 | elif self.greedy: |
|
385 | 384 | m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) |
|
386 | 385 | if not m2: |
|
387 | 386 | return [] |
|
388 | 387 | expr, attr = m2.group(1,2) |
|
389 | 388 | else: |
|
390 | 389 | return [] |
|
391 | 390 | |
|
392 | 391 | try: |
|
393 | 392 | obj = eval(expr, self.namespace) |
|
394 | 393 | except: |
|
395 | 394 | try: |
|
396 | 395 | obj = eval(expr, self.global_namespace) |
|
397 | 396 | except: |
|
398 | 397 | return [] |
|
399 | 398 | |
|
400 | 399 | if self.limit_to__all__ and hasattr(obj, '__all__'): |
|
401 | 400 | words = get__all__entries(obj) |
|
402 | 401 | else: |
|
403 | 402 | words = dir2(obj) |
|
404 | 403 | |
|
405 | 404 | try: |
|
406 | 405 | words = generics.complete_object(obj, words) |
|
407 | 406 | except TryNext: |
|
408 | 407 | pass |
|
409 | 408 | except Exception: |
|
410 | 409 | # Silence errors from completion function |
|
411 | 410 | #raise # dbg |
|
412 | 411 | pass |
|
413 | 412 | # Build match list to return |
|
414 | 413 | n = len(attr) |
|
415 | 414 | res = ["%s.%s" % (expr, w) for w in words if w[:n] == attr ] |
|
416 | 415 | return res |
|
417 | 416 | |
|
418 | 417 | |
|
419 | 418 | def get__all__entries(obj): |
|
420 | 419 | """returns the strings in the __all__ attribute""" |
|
421 | 420 | try: |
|
422 | 421 | words = getattr(obj, '__all__') |
|
423 | 422 | except: |
|
424 | 423 | return [] |
|
425 | 424 | |
|
426 | 425 | return [w for w in words if isinstance(w, string_types)] |
|
427 | 426 | |
|
428 | 427 | |
|
429 | 428 | class IPCompleter(Completer): |
|
430 | 429 | """Extension of the completer class with IPython-specific features""" |
|
431 | 430 | |
|
432 | 431 | def _greedy_changed(self, name, old, new): |
|
433 | 432 | """update the splitter and readline delims when greedy is changed""" |
|
434 | 433 | if new: |
|
435 | 434 | self.splitter.delims = GREEDY_DELIMS |
|
436 | 435 | else: |
|
437 | 436 | self.splitter.delims = DELIMS |
|
438 | 437 | |
|
439 | 438 | if self.readline: |
|
440 | 439 | self.readline.set_completer_delims(self.splitter.delims) |
|
441 | 440 | |
|
442 | 441 | merge_completions = CBool(True, config=True, |
|
443 | 442 | help="""Whether to merge completion results into a single list |
|
444 | 443 | |
|
445 | 444 | If False, only the completion results from the first non-empty |
|
446 | 445 | completer will be returned. |
|
447 | 446 | """ |
|
448 | 447 | ) |
|
449 | 448 | omit__names = Enum((0,1,2), default_value=2, config=True, |
|
450 | 449 | help="""Instruct the completer to omit private method names |
|
451 | 450 | |
|
452 | 451 | Specifically, when completing on ``object.<tab>``. |
|
453 | 452 | |
|
454 | 453 | When 2 [default]: all names that start with '_' will be excluded. |
|
455 | 454 | |
|
456 | 455 | When 1: all 'magic' names (``__foo__``) will be excluded. |
|
457 | 456 | |
|
458 | 457 | When 0: nothing will be excluded. |
|
459 | 458 | """ |
|
460 | 459 | ) |
|
461 | 460 | limit_to__all__ = CBool(default_value=False, config=True, |
|
462 | 461 | help="""Instruct the completer to use __all__ for the completion |
|
463 | 462 | |
|
464 | 463 | Specifically, when completing on ``object.<tab>``. |
|
465 | 464 | |
|
466 | 465 | When True: only those names in obj.__all__ will be included. |
|
467 | 466 | |
|
468 | 467 | When False [default]: the __all__ attribute is ignored |
|
469 | 468 | """ |
|
470 | 469 | ) |
|
471 | 470 | |
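Since these are ordinary configurable traits, they can be set from a profile's ipython_config.py; a sketch of the relevant lines:

    c = get_config()
    c.IPCompleter.greedy = False          # don't evaluate code on TAB
    c.IPCompleter.merge_completions = True
    c.IPCompleter.omit__names = 1         # hide only __dunder__ names
    c.IPCompleter.limit_to__all__ = True  # honour module __all__ lists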
|
472 | 471 | def __init__(self, shell=None, namespace=None, global_namespace=None, |
|
473 | 472 | use_readline=True, config=None, **kwargs): |
|
474 | 473 | """IPCompleter() -> completer |
|
475 | 474 | |
|
476 | 475 | Return a completer object suitable for use by the readline library |
|
477 | 476 | via readline.set_completer(). |
|
478 | 477 | |
|
479 | 478 | Inputs: |
|
480 | 479 | |
|
481 | 480 | - shell: a pointer to the ipython shell itself. This is needed |
|
482 | 481 | because this completer knows about magic functions, and those can |
|
483 | 482 | only be accessed via the ipython instance. |
|
484 | 483 | |
|
485 | 484 | - namespace: an optional dict where completions are performed. |
|
486 | 485 | |
|
487 | 486 | - global_namespace: secondary optional dict for completions, to |
|
488 | 487 | handle cases (such as IPython embedded inside functions) where |
|
489 | 488 | both Python scopes are visible. |
|
490 | 489 | |
|
491 | 490 | use_readline : bool, optional |
|
492 | 491 | If true, use the readline library. This completer can still function |
|
493 | 492 | without readline, though in that case callers must provide some extra |
|
494 | 493 | information on each call about the current line.""" |
|
495 | 494 | |
|
496 | 495 | self.magic_escape = ESC_MAGIC |
|
497 | 496 | self.splitter = CompletionSplitter() |
|
498 | 497 | |
|
499 | 498 | # Readline configuration, only used by the rlcompleter method. |
|
500 | 499 | if use_readline: |
|
501 | 500 | # We store the right version of readline so that later code can use it
|
502 | 501 | import IPython.utils.rlineimpl as readline |
|
503 | 502 | self.readline = readline |
|
504 | 503 | else: |
|
505 | 504 | self.readline = None |
|
506 | 505 | |
|
507 | 506 | # _greedy_changed() depends on splitter and readline being defined: |
|
508 | 507 | Completer.__init__(self, namespace=namespace, global_namespace=global_namespace, |
|
509 | 508 | config=config, **kwargs) |
|
510 | 509 | |
|
511 | 510 | # List where completion matches will be stored |
|
512 | 511 | self.matches = [] |
|
513 | 512 | self.shell = shell |
|
514 | 513 | # Regexp to split filenames with spaces in them |
|
515 | 514 | self.space_name_re = re.compile(r'([^\\] )') |
|
516 | 515 | # Hold a local ref. to glob.glob for speed |
|
517 | 516 | self.glob = glob.glob |
|
518 | 517 | |
|
519 | 518 | # Determine if we are running on 'dumb' terminals, like (X)Emacs |
|
520 | 519 | # buffers, to avoid completion problems. |
|
521 | 520 | term = os.environ.get('TERM','xterm') |
|
522 | 521 | self.dumb_terminal = term in ['dumb','emacs'] |
|
523 | 522 | |
|
524 | 523 | # Special handling of backslashes needed in win32 platforms |
|
525 | 524 | if sys.platform == "win32": |
|
526 | 525 | self.clean_glob = self._clean_glob_win32 |
|
527 | 526 | else: |
|
528 | 527 | self.clean_glob = self._clean_glob |
|
529 | 528 | |
|
530 | 529 | #regexp to parse docstring for function signature |
|
531 | 530 | self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') |
|
532 | 531 | self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') |
|
533 | 532 | #use this if positional argument name is also needed |
|
534 | 533 | #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)') |
|
535 | 534 | |
|
536 | 535 | # All active matcher routines for completion |
|
537 | 536 | self.matchers = [self.python_matches, |
|
538 | 537 | self.file_matches, |
|
539 | 538 | self.magic_matches, |
|
540 | 539 | self.python_func_kw_matches, |
|
541 | 540 | ] |
|
542 | 541 | |
|
543 | 542 | def all_completions(self, text): |
|
544 | 543 | """ |
|
545 | 544 | Wrapper around the complete method for the benefit of emacs |
|
546 | 545 | and pydb. |
|
547 | 546 | """ |
|
548 | 547 | return self.complete(text)[1] |
|
549 | 548 | |
|
550 | 549 | def _clean_glob(self,text): |
|
551 | 550 | return self.glob("%s*" % text) |
|
552 | 551 | |
|
553 | 552 | def _clean_glob_win32(self,text): |
|
554 | 553 | return [f.replace("\\","/") |
|
555 | 554 | for f in self.glob("%s*" % text)] |
|
556 | 555 | |
|
557 | 556 | def file_matches(self, text): |
|
558 | 557 | """Match filenames, expanding ~USER type strings. |
|
559 | 558 | |
|
560 | 559 | Most of the seemingly convoluted logic in this completer is an |
|
561 | 560 | attempt to handle filenames with spaces in them. And yet it's not |
|
562 | 561 | quite perfect, because Python's readline doesn't expose all of the |
|
563 | 562 | GNU readline details needed for this to be done correctly. |
|
564 | 563 | |
|
565 | 564 | For a filename with a space in it, the printed completions will be |
|
566 | 565 | only the parts after what's already been typed (instead of the |
|
567 | 566 | full completions, as is normally done). I don't think with the |
|
568 | 567 | current (as of Python 2.3) Python readline it's possible to do |
|
569 | 568 | better.""" |
|
570 | 569 | |
|
571 | 570 | #io.rprint('Completer->file_matches: <%r>' % text) # dbg |
|
572 | 571 | |
|
573 | 572 | # chars that require escaping with backslash - i.e. chars |
|
574 | 573 | # that readline treats incorrectly as delimiters, but we |
|
575 | 574 | # don't want to treat as delimiters in filename matching |
|
576 | 575 | # when escaped with backslash |
|
577 | 576 | if text.startswith('!'): |
|
578 | 577 | text = text[1:] |
|
579 | 578 | text_prefix = '!' |
|
580 | 579 | else: |
|
581 | 580 | text_prefix = '' |
|
582 | 581 | |
|
583 | 582 | text_until_cursor = self.text_until_cursor |
|
584 | 583 | # track strings with open quotes |
|
585 | 584 | open_quotes = has_open_quotes(text_until_cursor) |
|
586 | 585 | |
|
587 | 586 | if '(' in text_until_cursor or '[' in text_until_cursor: |
|
588 | 587 | lsplit = text |
|
589 | 588 | else: |
|
590 | 589 | try: |
|
591 | 590 | # arg_split ~ shlex.split, but with unicode bugs fixed by us |
|
592 | 591 | lsplit = arg_split(text_until_cursor)[-1] |
|
593 | 592 | except ValueError: |
|
594 | 593 | # typically an unmatched ", or backslash without escaped char. |
|
595 | 594 | if open_quotes: |
|
596 | 595 | lsplit = text_until_cursor.split(open_quotes)[-1] |
|
597 | 596 | else: |
|
598 | 597 | return [] |
|
599 | 598 | except IndexError: |
|
600 | 599 | # tab pressed on empty line |
|
601 | 600 | lsplit = "" |
|
602 | 601 | |
|
603 | 602 | if not open_quotes and lsplit != protect_filename(lsplit): |
|
604 | 603 | # if protectables are found, do matching on the whole escaped name |
|
605 | 604 | has_protectables = True |
|
606 | 605 | text0,text = text,lsplit |
|
607 | 606 | else: |
|
608 | 607 | has_protectables = False |
|
609 | 608 | text = os.path.expanduser(text) |
|
610 | 609 | |
|
611 | 610 | if text == "": |
|
612 | 611 | return [text_prefix + protect_filename(f) for f in self.glob("*")] |
|
613 | 612 | |
|
614 | 613 | # Compute the matches from the filesystem |
|
615 | 614 | m0 = self.clean_glob(text.replace('\\','')) |
|
616 | 615 | |
|
617 | 616 | if has_protectables: |
|
618 | 617 | # If we had protectables, we need to revert our changes to the |
|
619 | 618 | # beginning of filename so that we don't double-write the part |
|
620 | 619 | # of the filename we have so far |
|
621 | 620 | len_lsplit = len(lsplit) |
|
622 | 621 | matches = [text_prefix + text0 + |
|
623 | 622 | protect_filename(f[len_lsplit:]) for f in m0] |
|
624 | 623 | else: |
|
625 | 624 | if open_quotes: |
|
626 | 625 | # if we have a string with an open quote, we don't need to |
|
627 | 626 | # protect the names at all (and we _shouldn't_, as it |
|
628 | 627 | # would cause bugs when the filesystem call is made). |
|
629 | 628 | matches = m0 |
|
630 | 629 | else: |
|
631 | 630 | matches = [text_prefix + |
|
632 | 631 | protect_filename(f) for f in m0] |
|
633 | 632 | |
|
634 | 633 | #io.rprint('mm', matches) # dbg |
|
635 | 634 | |
|
636 | 635 | # Mark directories in input list by appending '/' to their names. |
|
637 | 636 | matches = [x+'/' if os.path.isdir(x) else x for x in matches] |
|
638 | 637 | return matches |
|
639 | 638 | |
|
640 | 639 | def magic_matches(self, text): |
|
641 | 640 | """Match magics""" |
|
642 | 641 | #print 'Completer->magic_matches:',text,'lb',self.text_until_cursor # dbg |
|
643 | 642 | # Get all shell magics now rather than statically, so magics loaded at |
|
644 | 643 | # runtime show up too. |
|
645 | 644 | lsm = self.shell.magics_manager.lsmagic() |
|
646 | 645 | line_magics = lsm['line'] |
|
647 | 646 | cell_magics = lsm['cell'] |
|
648 | 647 | pre = self.magic_escape |
|
649 | 648 | pre2 = pre+pre |
|
650 | 649 | |
|
651 | 650 | # Completion logic: |
|
652 | 651 | # - user gives %%: only do cell magics |
|
653 | 652 | # - user gives %: do both line and cell magics |
|
654 | 653 | # - no prefix: do both |
|
655 | 654 | # In other words, line magics are skipped if the user gives %% explicitly |
|
656 | 655 | bare_text = text.lstrip(pre) |
|
657 | 656 | comp = [ pre2+m for m in cell_magics if m.startswith(bare_text)] |
|
658 | 657 | if not text.startswith(pre2): |
|
659 | 658 | comp += [ pre+m for m in line_magics if m.startswith(bare_text)] |
|
660 | 659 | return comp |
|
661 | 660 | |
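A hedged illustration of that prefix logic (completer stands for an IPCompleter bound to a live shell, and the exact results depend on which magics are registered):

    completer.magic_matches('%ti')    # e.g. ['%%time', '%%timeit', '%time', '%timeit']
    completer.magic_matches('%%ti')   # cell magics only, e.g. ['%%time', '%%timeit']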
|
662 | 661 | def python_matches(self,text): |
|
663 | 662 | """Match attributes or global python names""" |
|
664 | 663 | |
|
665 | 664 | #io.rprint('Completer->python_matches, txt=%r' % text) # dbg |
|
666 | 665 | if "." in text: |
|
667 | 666 | try: |
|
668 | 667 | matches = self.attr_matches(text) |
|
669 | 668 | if text.endswith('.') and self.omit__names: |
|
670 | 669 | if self.omit__names == 1: |
|
671 | 670 | # true if txt is _not_ a __ name, false otherwise: |
|
672 | 671 | no__name = (lambda txt: |
|
673 | 672 | re.match(r'.*\.__.*?__',txt) is None) |
|
674 | 673 | else: |
|
675 | 674 | # true if txt is _not_ a _ name, false otherwise: |
|
676 | 675 | no__name = (lambda txt: |
|
677 | 676 | re.match(r'.*\._.*?',txt) is None) |
|
678 | 677 | matches = filter(no__name, matches) |
|
679 | 678 | except NameError: |
|
680 | 679 | # catches <undefined attributes>.<tab> |
|
681 | 680 | matches = [] |
|
682 | 681 | else: |
|
683 | 682 | matches = self.global_matches(text) |
|
684 | 683 | |
|
685 | 684 | return matches |
|
686 | 685 | |
|
687 | 686 | def _default_arguments_from_docstring(self, doc): |
|
688 | 687 | """Parse the first line of docstring for call signature. |
|
689 | 688 | |
|
690 | 689 | Docstring should be of the form 'min(iterable[, key=func])\n'. |
|
691 | 690 | It can also parse cython docstring of the form |
|
692 | 691 | 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. |
|
693 | 692 | """ |
|
694 | 693 | if doc is None: |
|
695 | 694 | return [] |
|
696 | 695 | |
|
697 | 696 | #care only about the first line
|
698 | 697 | line = doc.lstrip().splitlines()[0] |
|
699 | 698 | |
|
700 | 699 | #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') |
|
701 | 700 | #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' |
|
702 | 701 | sig = self.docstring_sig_re.search(line) |
|
703 | 702 | if sig is None: |
|
704 | 703 | return [] |
|
705 | 704 | # 'iterable[, key=func]' -> ['iterable[', ' key=func]']
|
706 | 705 | sig = sig.groups()[0].split(',') |
|
707 | 706 | ret = [] |
|
708 | 707 | for s in sig: |
|
709 | 708 | #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') |
|
710 | 709 | ret += self.docstring_kwd_re.findall(s) |
|
711 | 710 | return ret |
|
712 | 711 | |
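Given the two regexes set up in __init__, the parser pulls keyword names out of a signature-style first line; a small check (completer being any IPCompleter instance):

    completer._default_arguments_from_docstring('min(iterable[, key=func])\n')
    # -> ['key']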
|
713 | 712 | def _default_arguments(self, obj): |
|
714 | 713 | """Return the list of default arguments of obj if it is callable, |
|
715 | 714 | or empty list otherwise.""" |
|
716 | 715 | call_obj = obj |
|
717 | 716 | ret = [] |
|
718 | 717 | if inspect.isbuiltin(obj): |
|
719 | 718 | pass |
|
720 | 719 | elif not (inspect.isfunction(obj) or inspect.ismethod(obj)): |
|
721 | 720 | if inspect.isclass(obj): |
|
722 | 721 | #for cython embedsignature=True the constructor docstring
|
723 | 722 | #belongs to the object itself not __init__ |
|
724 | 723 | ret += self._default_arguments_from_docstring( |
|
725 | 724 | getattr(obj, '__doc__', '')) |
|
726 | 725 | # for classes, check for __init__,__new__ |
|
727 | 726 | call_obj = (getattr(obj, '__init__', None) or |
|
728 | 727 | getattr(obj, '__new__', None)) |
|
729 | 728 | # for all others, check if they are __call__able |
|
730 | 729 | elif hasattr(obj, '__call__'): |
|
731 | 730 | call_obj = obj.__call__ |
|
732 | 731 | |
|
733 | 732 | ret += self._default_arguments_from_docstring( |
|
734 | 733 | getattr(call_obj, '__doc__', '')) |
|
735 | 734 | |
|
736 | 735 | try: |
|
737 | 736 | args,_,_1,defaults = inspect.getargspec(call_obj) |
|
738 | 737 | if defaults: |
|
739 | 738 | ret+=args[-len(defaults):] |
|
740 | 739 | except TypeError: |
|
741 | 740 | pass |
|
742 | 741 | |
|
743 | 742 | return list(set(ret)) |
|
744 | 743 | |
|
745 | 744 | def python_func_kw_matches(self,text): |
|
746 | 745 | """Match named parameters (kwargs) of the last open function""" |
|
747 | 746 | |
|
748 | 747 | if "." in text: # a parameter cannot be dotted |
|
749 | 748 | return [] |
|
750 | 749 | try: regexp = self.__funcParamsRegex |
|
751 | 750 | except AttributeError: |
|
752 | 751 | regexp = self.__funcParamsRegex = re.compile(r''' |
|
753 | 752 | '.*?(?<!\\)' | # single quoted strings or |
|
754 | 753 | ".*?(?<!\\)" | # double quoted strings or |
|
755 | 754 | \w+ | # identifier |
|
756 | 755 | \S # other characters |
|
757 | 756 | ''', re.VERBOSE | re.DOTALL) |
|
758 | 757 | # 1. find the nearest identifier that comes before an unclosed |
|
759 | 758 | # parenthesis before the cursor |
|
760 | 759 | # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo" |
|
761 | 760 | tokens = regexp.findall(self.text_until_cursor) |
|
762 | 761 | tokens.reverse() |
|
763 | 762 | iterTokens = iter(tokens); openPar = 0 |
|
764 | 763 | |
|
765 | 764 | for token in iterTokens: |
|
766 | 765 | if token == ')': |
|
767 | 766 | openPar -= 1 |
|
768 | 767 | elif token == '(': |
|
769 | 768 | openPar += 1 |
|
770 | 769 | if openPar > 0: |
|
771 | 770 | # found the last unclosed parenthesis |
|
772 | 771 | break |
|
773 | 772 | else: |
|
774 | 773 | return [] |
|
775 | 774 | # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" ) |
|
776 | 775 | ids = [] |
|
777 | 776 | isId = re.compile(r'\w+$').match |
|
778 | 777 | |
|
779 | 778 | while True: |
|
780 | 779 | try: |
|
781 | 780 | ids.append(next(iterTokens)) |
|
782 | 781 | if not isId(ids[-1]): |
|
783 | 782 | ids.pop(); break |
|
784 | 783 | if not next(iterTokens) == '.': |
|
785 | 784 | break |
|
786 | 785 | except StopIteration: |
|
787 | 786 | break |
|
788 | 787 | # lookup the candidate callable matches either using global_matches |
|
789 | 788 | # or attr_matches for dotted names |
|
790 | 789 | if len(ids) == 1: |
|
791 | 790 | callableMatches = self.global_matches(ids[0]) |
|
792 | 791 | else: |
|
793 | 792 | callableMatches = self.attr_matches('.'.join(ids[::-1])) |
|
794 | 793 | argMatches = [] |
|
795 | 794 | for callableMatch in callableMatches: |
|
796 | 795 | try: |
|
797 | 796 | namedArgs = self._default_arguments(eval(callableMatch, |
|
798 | 797 | self.namespace)) |
|
799 | 798 | except: |
|
800 | 799 | continue |
|
801 | 800 | |
|
802 | 801 | for namedArg in namedArgs: |
|
803 | 802 | if namedArg.startswith(text): |
|
804 | 803 | argMatches.append("%s=" %namedArg) |
|
805 | 804 | return argMatches |
|
806 | 805 | |
|
807 | 806 | def dispatch_custom_completer(self, text): |
|
808 | 807 | #io.rprint("Custom! '%s' %s" % (text, self.custom_completers)) # dbg |
|
809 | 808 | line = self.line_buffer |
|
810 | 809 | if not line.strip(): |
|
811 | 810 | return None |
|
812 | 811 | |
|
813 | 812 | # Create a little structure to pass all the relevant information about |
|
814 | 813 | # the current completion to any custom completer. |
|
815 | 814 | event = Bunch() |
|
816 | 815 | event.line = line |
|
817 | 816 | event.symbol = text |
|
818 | 817 | cmd = line.split(None,1)[0] |
|
819 | 818 | event.command = cmd |
|
820 | 819 | event.text_until_cursor = self.text_until_cursor |
|
821 | 820 | |
|
822 | 821 | #print "\ncustom:{%s]\n" % event # dbg |
|
823 | 822 | |
|
824 | 823 | # for foo etc, try also to find completer for %foo |
|
825 | 824 | if not cmd.startswith(self.magic_escape): |
|
826 | 825 | try_magic = self.custom_completers.s_matches( |
|
827 | 826 | self.magic_escape + cmd) |
|
828 | 827 | else: |
|
829 | 828 | try_magic = [] |
|
830 | 829 | |
|
831 | 830 | for c in itertools.chain(self.custom_completers.s_matches(cmd), |
|
832 | 831 | try_magic, |
|
833 | 832 | self.custom_completers.flat_matches(self.text_until_cursor)): |
|
834 | 833 | #print "try",c # dbg |
|
835 | 834 | try: |
|
836 | 835 | res = c(event) |
|
837 | 836 | if res: |
|
838 | 837 | # first, try case sensitive match |
|
839 | 838 | withcase = [r for r in res if r.startswith(text)] |
|
840 | 839 | if withcase: |
|
841 | 840 | return withcase |
|
842 | 841 | # if none, then case insensitive ones are ok too |
|
843 | 842 | text_low = text.lower() |
|
844 | 843 | return [r for r in res if r.lower().startswith(text_low)] |
|
845 | 844 | except TryNext: |
|
846 | 845 | pass |
|
847 | 846 | |
|
848 | 847 | return None |
|
849 | 848 | |
|
850 | 849 | def complete(self, text=None, line_buffer=None, cursor_pos=None): |
|
851 | 850 | """Find completions for the given text and line context. |
|
852 | 851 | |
|
853 | 852 | Unlike the readline entry point, this is not called with a state counter;

854 | 853 | it returns all matches at once. Each completion should begin with 'text'.
|
855 | 854 | |
|
856 | 855 | Note that both the text and the line_buffer are optional, but at least |
|
857 | 856 | one of them must be given. |
|
858 | 857 | |
|
859 | 858 | Parameters |
|
860 | 859 | ---------- |
|
861 | 860 | text : string, optional |
|
862 | 861 | Text to perform the completion on. If not given, the line buffer |
|
863 | 862 | is split using the instance's CompletionSplitter object. |
|
864 | 863 | |
|
865 | 864 | line_buffer : string, optional |
|
866 | 865 | If not given, the completer attempts to obtain the current line |
|
867 | 866 | buffer via readline. This keyword allows clients which are |
|
868 | 867 | requesting text completions in non-readline contexts to inform
|
869 | 868 | the completer of the entire text. |
|
870 | 869 | |
|
871 | 870 | cursor_pos : int, optional |
|
872 | 871 | Index of the cursor in the full line buffer. Should be provided by |
|
873 | 872 | remote frontends where the kernel has no access to frontend state.
|
874 | 873 | |
|
875 | 874 | Returns |
|
876 | 875 | ------- |
|
877 | 876 | text : str |
|
878 | 877 | Text that was actually used in the completion. |
|
879 | 878 | |
|
880 | 879 | matches : list |
|
881 | 880 | A list of completion matches. |
|
882 | 881 | """ |
|
883 | 882 | #io.rprint('\nCOMP1 %r %r %r' % (text, line_buffer, cursor_pos)) # dbg |
|
884 | 883 | |
|
885 | 884 | # if the cursor position isn't given, the only sane assumption we can |
|
886 | 885 | # make is that it's at the end of the line (the common case) |
|
887 | 886 | if cursor_pos is None: |
|
888 | 887 | cursor_pos = len(line_buffer) if text is None else len(text) |
|
889 | 888 | |
|
890 | 889 | # if text is either None or an empty string, rely on the line buffer |
|
891 | 890 | if not text: |
|
892 | 891 | text = self.splitter.split_line(line_buffer, cursor_pos) |
|
893 | 892 | |
|
894 | 893 | # If no line buffer is given, assume the input text is all there was |
|
895 | 894 | if line_buffer is None: |
|
896 | 895 | line_buffer = text |
|
897 | 896 | |
|
898 | 897 | self.line_buffer = line_buffer |
|
899 | 898 | self.text_until_cursor = self.line_buffer[:cursor_pos] |
|
900 | 899 | #io.rprint('COMP2 %r %r %r' % (text, line_buffer, cursor_pos)) # dbg |
|
901 | 900 | |
|
902 | 901 | # Start with a clean slate of completions |
|
903 | 902 | self.matches[:] = [] |
|
904 | 903 | custom_res = self.dispatch_custom_completer(text) |
|
905 | 904 | if custom_res is not None: |
|
906 | 905 | # did custom completers produce something? |
|
907 | 906 | self.matches = custom_res |
|
908 | 907 | else: |
|
909 | 908 | # Extend the list of completions with the results of each |
|
910 | 909 | # matcher, so we return results to the user from all |
|
911 | 910 | # namespaces. |
|
912 | 911 | if self.merge_completions: |
|
913 | 912 | self.matches = [] |
|
914 | 913 | for matcher in self.matchers: |
|
915 | 914 | try: |
|
916 | 915 | self.matches.extend(matcher(text)) |
|
917 | 916 | except: |
|
918 | 917 | # Show the ugly traceback if the matcher causes an |
|
919 | 918 | # exception, but do NOT crash the kernel! |
|
920 | 919 | sys.excepthook(*sys.exc_info()) |
|
921 | 920 | else: |
|
922 | 921 | for matcher in self.matchers: |
|
923 | 922 | self.matches = matcher(text) |
|
924 | 923 | if self.matches: |
|
925 | 924 | break |
|
926 | 925 | # FIXME: we should extend our api to return a dict with completions for |
|
927 | 926 | # different types of objects. The rlcomplete() method could then |
|
928 | 927 | # simply collapse the dict into a list for readline, but we'd have |
|
929 | 928 | richer completion semantics in other environments.
|
930 | 929 | |
|
931 | 930 | # use penalize_magics_key to put magics after variables with same name |
|
932 | 931 | self.matches = sorted(set(self.matches), key=penalize_magics_key) |
|
933 | 932 | |
|
934 | 933 | #io.rprint('COMP TEXT, MATCHES: %r, %r' % (text, self.matches)) # dbg |
|
935 | 934 | return text, self.matches |
|
936 | 935 | |
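A sketch of how a non-readline frontend (for example a kernel answering a completion request) might call this API; completer is an IPCompleter instance:

    line = 'import o'
    text, matches = completer.complete(line_buffer=line, cursor_pos=len(line))
    # text is the token actually completed ('o' here);
    # matches is the merged, de-duplicated, sorted candidate list.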
|
937 | 936 | def rlcomplete(self, text, state): |
|
938 | 937 | """Return the state-th possible completion for 'text'. |
|
939 | 938 | |
|
940 | 939 | This is called successively with state == 0, 1, 2, ... until it |
|
941 | 940 | returns None. The completion should begin with 'text'. |
|
942 | 941 | |
|
943 | 942 | Parameters |
|
944 | 943 | ---------- |
|
945 | 944 | text : string |
|
946 | 945 | Text to perform the completion on. |
|
947 | 946 | |
|
948 | 947 | state : int |
|
949 | 948 | Counter used by readline. |
|
950 | 949 | """ |
|
951 | 950 | if state==0: |
|
952 | 951 | |
|
953 | 952 | self.line_buffer = line_buffer = self.readline.get_line_buffer() |
|
954 | 953 | cursor_pos = self.readline.get_endidx() |
|
955 | 954 | |
|
956 | 955 | #io.rprint("\nRLCOMPLETE: %r %r %r" % |
|
957 | 956 | # (text, line_buffer, cursor_pos) ) # dbg |
|
958 | 957 | |
|
959 | 958 | # if there is only a tab on a line with only whitespace, instead of |
|
960 | 959 | # the mostly useless 'do you want to see all million completions' |
|
961 | 960 | # message, just do the right thing and give the user his tab! |
|
962 | 961 | # Incidentally, this enables pasting of tabbed text from an editor |
|
963 | 962 | # (as long as autoindent is off). |
|
964 | 963 | |
|
965 | 964 | # It should be noted that at least pyreadline still shows file |
|
966 | 965 | # completions - is there a way around it? |
|
967 | 966 | |
|
968 | 967 | # don't apply this on 'dumb' terminals, such as emacs buffers, so |
|
969 | 968 | # we don't interfere with their own tab-completion mechanism. |
|
970 | 969 | if not (self.dumb_terminal or line_buffer.strip()): |
|
971 | 970 | self.readline.insert_text('\t') |
|
972 | 971 | sys.stdout.flush() |
|
973 | 972 | return None |
|
974 | 973 | |
|
975 | 974 | # Note: debugging exceptions that may occur in completion is very |
|
976 | 975 | # tricky, because readline unconditionally silences them. So if |
|
977 | 976 | # during development you suspect a bug in the completion code, turn |
|
978 | 977 | # this flag on temporarily by uncommenting the second form (don't |
|
979 | 978 | # flip the value in the first line, as the '# dbg' marker can be |
|
980 | 979 | # automatically detected and is used elsewhere). |
|
981 | 980 | DEBUG = False |
|
982 | 981 | #DEBUG = True # dbg |
|
983 | 982 | if DEBUG: |
|
984 | 983 | try: |
|
985 | 984 | self.complete(text, line_buffer, cursor_pos) |
|
986 | 985 | except: |
|
987 | 986 | import traceback; traceback.print_exc() |
|
988 | 987 | else: |
|
989 | 988 | # The normal production version is here |
|
990 | 989 | |
|
991 | 990 | # This method computes the self.matches array |
|
992 | 991 | self.complete(text, line_buffer, cursor_pos) |
|
993 | 992 | |
|
994 | 993 | try: |
|
995 | 994 | return self.matches[state] |
|
996 | 995 | except IndexError: |
|
997 | 996 | return None |
@@ -1,902 +1,900 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Display formatters. |
|
3 | 3 | |
|
4 | 4 | Inheritance diagram: |
|
5 | 5 | |
|
6 | 6 | .. inheritance-diagram:: IPython.core.formatters |
|
7 | 7 | :parts: 3 |
|
8 | 8 | |
|
9 | 9 | Authors: |
|
10 | 10 | |
|
11 | 11 | * Robert Kern |
|
12 | 12 | * Brian Granger |
|
13 | 13 | """ |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | # Copyright (C) 2010-2011, IPython Development Team. |
|
16 | 16 | # |
|
17 | 17 | # Distributed under the terms of the Modified BSD License. |
|
18 | 18 | # |
|
19 | 19 | # The full license is in the file COPYING.txt, distributed with this software. |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | # Imports |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | |
|
26 | 26 | # Stdlib imports |
|
27 | 27 | import abc |
|
28 | 28 | import inspect |
|
29 | 29 | import sys |
|
30 | 30 | import types |
|
31 | 31 | import warnings |
|
32 | 32 | |
|
33 | 33 | from IPython.external.decorator import decorator |
|
34 | 34 | |
|
35 | 35 | # Our own imports |
|
36 | 36 | from IPython.config.configurable import Configurable |
|
37 | 37 | from IPython.lib import pretty |
|
38 | from IPython.utils import io | |
|
39 | 38 | from IPython.utils.traitlets import ( |
|
40 | 39 | Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, |
|
41 | 40 | ) |
|
42 | from IPython.utils.warn import warn | |
|
43 | 41 | from IPython.utils.py3compat import ( |
|
44 | 42 | unicode_to_str, with_metaclass, PY3, string_types, unicode_type, |
|
45 | 43 | ) |
|
46 | 44 | |
|
47 | 45 | if PY3: |
|
48 | 46 | from io import StringIO |
|
49 | 47 | else: |
|
50 | 48 | from StringIO import StringIO |
|
51 | 49 | |
|
52 | 50 | |
|
53 | 51 | #----------------------------------------------------------------------------- |
|
54 | 52 | # The main DisplayFormatter class |
|
55 | 53 | #----------------------------------------------------------------------------- |
|
56 | 54 | |
|
57 | 55 | |
|
58 | 56 | def _valid_formatter(f): |
|
59 | 57 | """Return whether an object is a valid formatter |
|
60 | 58 | |
|
61 | 59 | Cases checked: |
|
62 | 60 | |
|
63 | 61 | - bound methods OK |
|
64 | 62 | - unbound methods NO |
|
65 | 63 | - callable with zero args OK |
|
66 | 64 | """ |
|
67 | 65 | if f is None: |
|
68 | 66 | return False |
|
69 | 67 | elif isinstance(f, type(str.find)): |
|
70 | 68 | # unbound methods on compiled classes have type method_descriptor |
|
71 | 69 | return False |
|
72 | 70 | elif isinstance(f, types.BuiltinFunctionType): |
|
73 | 71 | # bound methods on compiled classes have type builtin_function |
|
74 | 72 | return True |
|
75 | 73 | elif callable(f): |
|
76 | 74 | # anything that works with zero args should be okay |
|
77 | 75 | try: |
|
78 | 76 | inspect.getcallargs(f) |
|
79 | 77 | except TypeError: |
|
80 | 78 | return False |
|
81 | 79 | else: |
|
82 | 80 | return True |
|
83 | 81 | return False |
|
84 | 82 | |
|
85 | 83 | def _safe_get_formatter_method(obj, name): |
|
86 | 84 | """Safely get a formatter method""" |
|
87 | 85 | method = pretty._safe_getattr(obj, name, None) |
|
88 | 86 | # formatter methods must be bound |
|
89 | 87 | if _valid_formatter(method): |
|
90 | 88 | return method |
|
91 | 89 | |
|
92 | 90 | |
|
93 | 91 | class DisplayFormatter(Configurable): |
|
94 | 92 | |
|
95 | 93 | # When set to true only the default plain text formatter will be used. |
|
96 | 94 | plain_text_only = Bool(False, config=True) |
|
97 | 95 | def _plain_text_only_changed(self, name, old, new): |
|
98 | 96 | warnings.warn("""DisplayFormatter.plain_text_only is deprecated. |
|
99 | 97 | |
|
100 | 98 | Use DisplayFormatter.active_types = ['text/plain'] |
|
101 | 99 | for the same effect. |
|
102 | 100 | """, DeprecationWarning) |
|
103 | 101 | if new: |
|
104 | 102 | self.active_types = ['text/plain'] |
|
105 | 103 | else: |
|
106 | 104 | self.active_types = self.format_types |
|
107 | 105 | |
|
108 | 106 | active_types = List(Unicode, config=True, |
|
109 | 107 | help="""List of currently active mime-types to display. |
|
110 | 108 | You can use this to set a white-list for formats to display. |
|
111 | 109 | |
|
112 | 110 | Most users will not need to change this value. |
|
113 | 111 | """) |
|
114 | 112 | def _active_types_default(self): |
|
115 | 113 | return self.format_types |
|
116 | 114 | |
|
117 | 115 | def _active_types_changed(self, name, old, new): |
|
118 | 116 | for key, formatter in self.formatters.items(): |
|
119 | 117 | if key in new: |
|
120 | 118 | formatter.enabled = True |
|
121 | 119 | else: |
|
122 | 120 | formatter.enabled = False |
|
123 | 121 | |
|
124 | 122 | # A dict of formatter whose keys are format types (MIME types) and whose |
|
125 | 123 | # values are subclasses of BaseFormatter. |
|
126 | 124 | formatters = Dict() |
|
127 | 125 | def _formatters_default(self): |
|
128 | 126 | """Activate the default formatters.""" |
|
129 | 127 | formatter_classes = [ |
|
130 | 128 | PlainTextFormatter, |
|
131 | 129 | HTMLFormatter, |
|
132 | 130 | MarkdownFormatter, |
|
133 | 131 | SVGFormatter, |
|
134 | 132 | PNGFormatter, |
|
135 | 133 | PDFFormatter, |
|
136 | 134 | JPEGFormatter, |
|
137 | 135 | LatexFormatter, |
|
138 | 136 | JSONFormatter, |
|
139 | 137 | JavascriptFormatter |
|
140 | 138 | ] |
|
141 | 139 | d = {} |
|
142 | 140 | for cls in formatter_classes: |
|
143 | 141 | f = cls(parent=self) |
|
144 | 142 | d[f.format_type] = f |
|
145 | 143 | return d |
|
146 | 144 | |
|
147 | 145 | def format(self, obj, include=None, exclude=None): |
|
148 | 146 | """Return a format data dict for an object. |
|
149 | 147 | |
|
150 | 148 | By default all format types will be computed. |
|
151 | 149 | |
|
152 | 150 | The following MIME types are currently implemented: |
|
153 | 151 | |
|
154 | 152 | * text/plain |
|
155 | 153 | * text/html |
|
156 | 154 | * text/markdown |
|
157 | 155 | * text/latex |
|
158 | 156 | * application/json |
|
159 | 157 | * application/javascript |
|
160 | 158 | * application/pdf |
|
161 | 159 | * image/png |
|
162 | 160 | * image/jpeg |
|
163 | 161 | * image/svg+xml |
|
164 | 162 | |
|
165 | 163 | Parameters |
|
166 | 164 | ---------- |
|
167 | 165 | obj : object |
|
168 | 166 | The Python object whose format data will be computed. |
|
169 | 167 | include : list or tuple, optional |
|
170 | 168 | A list of format type strings (MIME types) to include in the |
|
171 | 169 | format data dict. If this is set *only* the format types included |
|
172 | 170 | in this list will be computed. |
|
173 | 171 | exclude : list or tuple, optional |
|
174 | 172 | A list of format type string (MIME types) to exclude in the format |
|
175 | 173 | data dict. If this is set all format types will be computed, |
|
176 | 174 | except for those included in this argument. |
|
177 | 175 | |
|
178 | 176 | Returns |
|
179 | 177 | ------- |
|
180 | 178 | (format_dict, metadata_dict) : tuple of two dicts |
|
181 | 179 | |
|
182 | 180 | format_dict is a dictionary of key/value pairs, one of each format that was |
|
183 | 181 | generated for the object. The keys are the format types, which |
|
184 | 182 | will usually be MIME type strings and the values and JSON'able |
|
185 | 183 | data structure containing the raw data for the representation in |
|
186 | 184 | that format. |
|
187 | 185 | |
|
188 | 186 | metadata_dict is a dictionary of metadata about each mime-type output. |
|
189 | 187 | Its keys will be a strict subset of the keys in format_dict. |
|
190 | 188 | """ |
|
191 | 189 | format_dict = {} |
|
192 | 190 | md_dict = {} |
|
193 | 191 | |
|
194 | 192 | for format_type, formatter in self.formatters.items(): |
|
195 | 193 | if include and format_type not in include: |
|
196 | 194 | continue |
|
197 | 195 | if exclude and format_type in exclude: |
|
198 | 196 | continue |
|
199 | 197 | |
|
200 | 198 | md = None |
|
201 | 199 | try: |
|
202 | 200 | data = formatter(obj) |
|
203 | 201 | except: |
|
204 | 202 | # FIXME: log the exception |
|
205 | 203 | raise |
|
206 | 204 | |
|
207 | 205 | # formatters can return raw data or (data, metadata) |
|
208 | 206 | if isinstance(data, tuple) and len(data) == 2: |
|
209 | 207 | data, md = data |
|
210 | 208 | |
|
211 | 209 | if data is not None: |
|
212 | 210 | format_dict[format_type] = data |
|
213 | 211 | if md is not None: |
|
214 | 212 | md_dict[format_type] = md |
|
215 | 213 | |
|
216 | 214 | return format_dict, md_dict |
|
217 | 215 | |
|
218 | 216 | @property |
|
219 | 217 | def format_types(self): |
|
220 | 218 | """Return the format types (MIME types) of the active formatters.""" |
|
221 | 219 | return list(self.formatters.keys()) |
|
222 | 220 | |
|
223 | 221 | |
|
224 | 222 | #----------------------------------------------------------------------------- |
|
225 | 223 | # Formatters for specific format types (text, html, svg, etc.) |
|
226 | 224 | #----------------------------------------------------------------------------- |
|
227 | 225 | |
|
228 | 226 | class FormatterWarning(UserWarning): |
|
229 | 227 | """Warning class for errors in formatters""" |
|
230 | 228 | |
|
231 | 229 | @decorator |
|
232 | 230 | def warn_format_error(method, self, *args, **kwargs): |
|
233 | 231 | """decorator for warning on failed format call""" |
|
234 | 232 | try: |
|
235 | 233 | r = method(self, *args, **kwargs) |
|
236 | 234 | except NotImplementedError as e: |
|
237 | 235 | # don't warn on NotImplementedErrors |
|
238 | 236 | return None |
|
239 | 237 | except Exception as e: |
|
240 | 238 | warnings.warn("Exception in %s formatter: %s" % (self.format_type, e), |
|
241 | 239 | FormatterWarning, |
|
242 | 240 | ) |
|
243 | 241 | return None |
|
244 | 242 | if r is None or isinstance(r, self._return_type) or \ |
|
245 | 243 | (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): |
|
246 | 244 | return r |
|
247 | 245 | else: |
|
248 | 246 | warnings.warn( |
|
249 | 247 | "%s formatter returned invalid type %s (expected %s) for object: %s" % \ |
|
250 | 248 | (self.format_type, type(r), self._return_type, pretty._safe_repr(args[0])), |
|
251 | 249 | FormatterWarning |
|
252 | 250 | ) |
|
253 | 251 | |
|
254 | 252 | |
|
255 | 253 | class FormatterABC(with_metaclass(abc.ABCMeta, object)): |
|
256 | 254 | """ Abstract base class for Formatters. |
|
257 | 255 | |
|
258 | 256 | A formatter is a callable class that is responsible for computing the |
|
259 | 257 | raw format data for a particular format type (MIME type). For example, |
|
260 | 258 | an HTML formatter would have a format type of `text/html` and would return |
|
261 | 259 | the HTML representation of the object when called. |
|
262 | 260 | """ |
|
263 | 261 | |
|
264 | 262 | # The format type of the data returned, usually a MIME type. |
|
265 | 263 | format_type = 'text/plain' |
|
266 | 264 | |
|
267 | 265 | # Is the formatter enabled... |
|
268 | 266 | enabled = True |
|
269 | 267 | |
|
270 | 268 | @abc.abstractmethod |
|
271 | 269 | @warn_format_error |
|
272 | 270 | def __call__(self, obj): |
|
273 | 271 | """Return a JSON'able representation of the object. |
|
274 | 272 | |
|
275 | 273 | If the object cannot be formatted by this formatter, |
|
276 | 274 | warn and return None. |
|
277 | 275 | """ |
|
278 | 276 | return repr(obj) |
|
279 | 277 | |
|
280 | 278 | |
|
281 | 279 | def _mod_name_key(typ): |
|
282 | 280 | """Return a (__module__, __name__) tuple for a type. |
|
283 | 281 | |
|
284 | 282 | Used as key in Formatter.deferred_printers. |
|
285 | 283 | """ |
|
286 | 284 | module = getattr(typ, '__module__', None) |
|
287 | 285 | name = getattr(typ, '__name__', None) |
|
288 | 286 | return (module, name) |
|
289 | 287 | |
|
290 | 288 | |
|
291 | 289 | def _get_type(obj): |
|
292 | 290 | """Return the type of an instance (old and new-style)""" |
|
293 | 291 | return getattr(obj, '__class__', None) or type(obj) |
|
294 | 292 | |
|
295 | 293 | _raise_key_error = object() |
|
296 | 294 | |
|
297 | 295 | |
|
298 | 296 | class BaseFormatter(Configurable): |
|
299 | 297 | """A base formatter class that is configurable. |
|
300 | 298 | |
|
301 | 299 | This formatter should usually be used as the base class of all formatters. |
|
302 | 300 | It is a traited :class:`Configurable` class and includes an extensible |
|
303 | 301 | API for users to determine how their objects are formatted. The following |
|
304 | 302 | logic is used to find a function to format an given object. |
|
305 | 303 | |
|
306 | 304 | 1. The object is introspected to see if it has a method with the name |
|
307 | 305 | :attr:`print_method`. If is does, that object is passed to that method |
|
308 | 306 | for formatting. |
|
309 | 307 | 2. If no print method is found, three internal dictionaries are consulted |
|
310 | 308 | to find print method: :attr:`singleton_printers`, :attr:`type_printers` |
|
311 | 309 | and :attr:`deferred_printers`. |
|
312 | 310 | |
|
313 | 311 | Users should use these dictionaries to register functions that will be |
|
314 | 312 | used to compute the format data for their objects (if those objects don't |
|
315 | 313 | have the special print methods). The easiest way of using these |
|
316 | 314 | dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` |
|
317 | 315 | methods. |
|
318 | 316 | |
|
319 | 317 | If no function/callable is found to compute the format data, ``None`` is |
|
320 | 318 | returned and this format type is not used. |
|
321 | 319 | """ |
|
322 | 320 | |
|
323 | 321 | format_type = Unicode('text/plain') |
|
324 | 322 | _return_type = string_types |
|
325 | 323 | |
|
326 | 324 | enabled = Bool(True, config=True) |
|
327 | 325 | |
|
328 | 326 | print_method = ObjectName('__repr__') |
|
329 | 327 | |
|
330 | 328 | # The singleton printers. |
|
331 | 329 | # Maps the IDs of the builtin singleton objects to the format functions. |
|
332 | 330 | singleton_printers = Dict(config=True) |
|
333 | 331 | |
|
334 | 332 | # The type-specific printers. |
|
335 | 333 | # Map type objects to the format functions. |
|
336 | 334 | type_printers = Dict(config=True) |
|
337 | 335 | |
|
338 | 336 | # The deferred-import type-specific printers. |
|
339 | 337 | # Map (modulename, classname) pairs to the format functions. |
|
340 | 338 | deferred_printers = Dict(config=True) |
|
341 | 339 | |
|
342 | 340 | @warn_format_error |
|
343 | 341 | def __call__(self, obj): |
|
344 | 342 | """Compute the format for an object.""" |
|
345 | 343 | if self.enabled: |
|
346 | 344 | # lookup registered printer |
|
347 | 345 | try: |
|
348 | 346 | printer = self.lookup(obj) |
|
349 | 347 | except KeyError: |
|
350 | 348 | pass |
|
351 | 349 | else: |
|
352 | 350 | return printer(obj) |
|
353 | 351 | # Finally look for special method names |
|
354 | 352 | method = _safe_get_formatter_method(obj, self.print_method) |
|
355 | 353 | if method is not None: |
|
356 | 354 | return method() |
|
357 | 355 | return None |
|
358 | 356 | else: |
|
359 | 357 | return None |
|
360 | 358 | |
|
361 | 359 | def __contains__(self, typ): |
|
362 | 360 | """Map the ``in`` operator to lookup_by_type.""" |
|
363 | 361 | try: |
|
364 | 362 | self.lookup_by_type(typ) |
|
365 | 363 | except KeyError: |
|
366 | 364 | return False |
|
367 | 365 | else: |
|
368 | 366 | return True |
|
369 | 367 | |
|
370 | 368 | def lookup(self, obj): |
|
371 | 369 | """Look up the formatter for a given instance. |
|
372 | 370 | |
|
373 | 371 | Parameters |
|
374 | 372 | ---------- |
|
375 | 373 | obj : object instance |
|
376 | 374 | |
|
377 | 375 | Returns |
|
378 | 376 | ------- |
|
379 | 377 | f : callable |
|
380 | 378 | The registered formatting callable for the type. |
|
381 | 379 | |
|
382 | 380 | Raises |
|
383 | 381 | ------ |
|
384 | 382 | KeyError if the type has not been registered. |
|
385 | 383 | """ |
|
386 | 384 | # look for singleton first |
|
387 | 385 | obj_id = id(obj) |
|
388 | 386 | if obj_id in self.singleton_printers: |
|
389 | 387 | return self.singleton_printers[obj_id] |
|
390 | 388 | # then lookup by type |
|
391 | 389 | return self.lookup_by_type(_get_type(obj)) |
|
392 | 390 | |
|
393 | 391 | def lookup_by_type(self, typ): |
|
394 | 392 | """Look up the registered formatter for a type. |
|
395 | 393 | |
|
396 | 394 | Parameters |
|
397 | 395 | ---------- |
|
398 | 396 | typ : type or '__module__.__name__' string for a type |
|
399 | 397 | |
|
400 | 398 | Returns |
|
401 | 399 | ------- |
|
402 | 400 | f : callable |
|
403 | 401 | The registered formatting callable for the type. |
|
404 | 402 | |
|
405 | 403 | Raises |
|
406 | 404 | ------ |
|
407 | 405 | KeyError if the type has not been registered. |
|
408 | 406 | """ |
|
409 | 407 | if isinstance(typ, string_types): |
|
410 | 408 | typ_key = tuple(typ.rsplit('.',1)) |
|
411 | 409 | if typ_key not in self.deferred_printers: |
|
412 | 410 | # We may have it cached in the type map. We will have to |
|
413 | 411 | # iterate over all of the types to check. |
|
414 | 412 | for cls in self.type_printers: |
|
415 | 413 | if _mod_name_key(cls) == typ_key: |
|
416 | 414 | return self.type_printers[cls] |
|
417 | 415 | else: |
|
418 | 416 | return self.deferred_printers[typ_key] |
|
419 | 417 | else: |
|
420 | 418 | for cls in pretty._get_mro(typ): |
|
421 | 419 | if cls in self.type_printers or self._in_deferred_types(cls): |
|
422 | 420 | return self.type_printers[cls] |
|
423 | 421 | |
|
424 | 422 | # If we have reached here, the lookup failed. |
|
425 | 423 | raise KeyError("No registered printer for {0!r}".format(typ)) |
|
426 | 424 | |
|
427 | 425 | def for_type(self, typ, func=None): |
|
428 | 426 | """Add a format function for a given type. |
|
429 | 427 | |
|
430 | 428 | Parameters |
|
431 | 429 | ---------- |
|
432 | 430 | typ : type or '__module__.__name__' string for a type |
|
433 | 431 | The class of the object that will be formatted using `func`. |
|
434 | 432 | func : callable |
|
435 | 433 | A callable for computing the format data. |
|
436 | 434 | `func` will be called with the object to be formatted, |
|
437 | 435 | and will return the raw data in this formatter's format. |
|
438 | 436 | Subclasses may use a different call signature for the |
|
439 | 437 | `func` argument. |
|
440 | 438 | |
|
441 | 439 | If `func` is None or not specified, there will be no change, |
|
442 | 440 | only returning the current value. |
|
443 | 441 | |
|
444 | 442 | Returns |
|
445 | 443 | ------- |
|
446 | 444 | oldfunc : callable |
|
447 | 445 | The currently registered callable. |
|
448 | 446 | If you are registering a new formatter, |
|
449 | 447 | this will be the previous value (to enable restoring later). |
|
450 | 448 | """ |
|
451 | 449 | # if string given, interpret as 'pkg.module.class_name' |
|
452 | 450 | if isinstance(typ, string_types): |
|
453 | 451 | type_module, type_name = typ.rsplit('.', 1) |
|
454 | 452 | return self.for_type_by_name(type_module, type_name, func) |
|
455 | 453 | |
|
456 | 454 | try: |
|
457 | 455 | oldfunc = self.lookup_by_type(typ) |
|
458 | 456 | except KeyError: |
|
459 | 457 | oldfunc = None |
|
460 | 458 | |
|
461 | 459 | if func is not None: |
|
462 | 460 | self.type_printers[typ] = func |
|
463 | 461 | |
|
464 | 462 | return oldfunc |
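A hedged usage sketch of ``for_type`` with a type object (``Vector`` and ``vector_to_html`` are made-up names, and ``ip`` is assumed to be the result of ``get_ipython()`` in a running session):

    class Vector(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    def vector_to_html(v):
        # raw data in this formatter's format: an HTML snippet
        return "<b>Vector(%r, %r)</b>" % (v.x, v.y)

    html = ip.display_formatter.formatters['text/html']
    previous = html.for_type(Vector, vector_to_html)  # previous printer, or None
    html(Vector(1, 2))                                # -> '<b>Vector(1, 2)</b>'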
|
465 | 463 | |
|
466 | 464 | def for_type_by_name(self, type_module, type_name, func=None): |
|
467 | 465 | """Add a format function for a type specified by the full dotted |
|
468 | 466 | module and name of the type, rather than the type of the object. |
|
469 | 467 | |
|
470 | 468 | Parameters |
|
471 | 469 | ---------- |
|
472 | 470 | type_module : str |
|
473 | 471 | The full dotted name of the module the type is defined in, like |
|
474 | 472 | ``numpy``. |
|
475 | 473 | type_name : str |
|
476 | 474 | The name of the type (the class name), like ``dtype`` |
|
477 | 475 | func : callable |
|
478 | 476 | A callable for computing the format data. |
|
479 | 477 | `func` will be called with the object to be formatted, |
|
480 | 478 | and will return the raw data in this formatter's format. |
|
481 | 479 | Subclasses may use a different call signature for the |
|
482 | 480 | `func` argument. |
|
483 | 481 | |
|
484 | 482 | If `func` is None or unspecified, there will be no change, |
|
485 | 483 | only returning the current value. |
|
486 | 484 | |
|
487 | 485 | Returns |
|
488 | 486 | ------- |
|
489 | 487 | oldfunc : callable |
|
490 | 488 | The currently registered callable. |
|
491 | 489 | If you are registering a new formatter, |
|
492 | 490 | this will be the previous value (to enable restoring later). |
|
493 | 491 | """ |
|
494 | 492 | key = (type_module, type_name) |
|
495 | 493 | |
|
496 | 494 | try: |
|
497 | 495 | oldfunc = self.lookup_by_type("%s.%s" % key) |
|
498 | 496 | except KeyError: |
|
499 | 497 | oldfunc = None |
|
500 | 498 | |
|
501 | 499 | if func is not None: |
|
502 | 500 | self.deferred_printers[key] = func |
|
503 | 501 | return oldfunc |
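When the type lives in a module that may not be imported yet, the by-name variant avoids the import entirely; a sketch (again assuming ``ip = get_ipython()``, with an illustrative printer):

    # Registered under ('numpy', 'float64') in deferred_printers; it is moved
    # to type_printers the first time such an object is actually displayed.
    latex = ip.display_formatter.formatters['text/latex']
    latex.for_type_by_name('numpy', 'float64', lambda x: r'$%s$' % x)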
|
504 | 502 | |
|
505 | 503 | def pop(self, typ, default=_raise_key_error): |
|
506 | 504 | """Pop a formatter for the given type. |
|
507 | 505 | |
|
508 | 506 | Parameters |
|
509 | 507 | ---------- |
|
510 | 508 | typ : type or '__module__.__name__' string for a type |
|
511 | 509 | default : object |
|
512 | 510 | value to be returned if no formatter is registered for typ. |
|
513 | 511 | |
|
514 | 512 | Returns |
|
515 | 513 | ------- |
|
516 | 514 | obj : object |
|
517 | 515 | The last registered object for the type. |
|
518 | 516 | |
|
519 | 517 | Raises |
|
520 | 518 | ------ |
|
521 | 519 | KeyError if the type is not registered and default is not specified. |
|
522 | 520 | """ |
|
523 | 521 | |
|
524 | 522 | if isinstance(typ, string_types): |
|
525 | 523 | typ_key = tuple(typ.rsplit('.',1)) |
|
526 | 524 | if typ_key not in self.deferred_printers: |
|
527 | 525 | # We may have it cached in the type map. We will have to |
|
528 | 526 | # iterate over all of the types to check. |
|
529 | 527 | for cls in self.type_printers: |
|
530 | 528 | if _mod_name_key(cls) == typ_key: |
|
531 | 529 | old = self.type_printers.pop(cls) |
|
532 | 530 | break |
|
533 | 531 | else: |
|
534 | 532 | old = default |
|
535 | 533 | else: |
|
536 | 534 | old = self.deferred_printers.pop(typ_key) |
|
537 | 535 | else: |
|
538 | 536 | if typ in self.type_printers: |
|
539 | 537 | old = self.type_printers.pop(typ) |
|
540 | 538 | else: |
|
541 | 539 | old = self.deferred_printers.pop(_mod_name_key(typ), default) |
|
542 | 540 | if old is _raise_key_error: |
|
543 | 541 | raise KeyError("No registered value for {0!r}".format(typ)) |
|
544 | 542 | return old |
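A short sketch of ``pop`` (``fmt`` is assumed to be any BaseFormatter instance, for example one taken from ``get_ipython().display_formatter.formatters``; ``MyType`` is made up):

    class MyType(object):
        pass

    old = fmt.pop(MyType, None)          # by type; None here, nothing was registered
    old = fmt.pop('mypkg.MyType', None)  # or by '__module__.__name__' string
    # without a default, an unregistered type raises KeyError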
|
545 | 543 | |
|
546 | 544 | def _in_deferred_types(self, cls): |
|
547 | 545 | """ |
|
548 | 546 | Check if the given class is specified in the deferred type registry. |
|
549 | 547 | |
|
550 | 548 | Successful matches will be moved to the regular type registry for future use. |
|
551 | 549 | """ |
|
552 | 550 | mod = getattr(cls, '__module__', None) |
|
553 | 551 | name = getattr(cls, '__name__', None) |
|
554 | 552 | key = (mod, name) |
|
555 | 553 | if key in self.deferred_printers: |
|
556 | 554 | # Move the printer over to the regular registry. |
|
557 | 555 | printer = self.deferred_printers.pop(key) |
|
558 | 556 | self.type_printers[cls] = printer |
|
559 | 557 | return True |
|
560 | 558 | return False |
|
561 | 559 | |
|
562 | 560 | |
|
563 | 561 | class PlainTextFormatter(BaseFormatter): |
|
564 | 562 | """The default pretty-printer. |
|
565 | 563 | |
|
566 | 564 | This uses :mod:`IPython.lib.pretty` to compute the format data of |
|
567 | 565 | the object. If the object cannot be pretty printed, :func:`repr` is used. |
|
568 | 566 | See the documentation of :mod:`IPython.lib.pretty` for details on |
|
569 | 567 | how to write pretty printers. Here is a simple example:: |
|
570 | 568 | |
|
571 | 569 | def dtype_pprinter(obj, p, cycle): |
|
572 | 570 | if cycle: |
|
573 | 571 | return p.text('dtype(...)') |
|
574 | 572 | if hasattr(obj, 'fields'): |
|
575 | 573 | if obj.fields is None: |
|
576 | 574 | p.text(repr(obj)) |
|
577 | 575 | else: |
|
578 | 576 | p.begin_group(7, 'dtype([') |
|
579 | 577 | for i, field in enumerate(obj.descr): |
|
580 | 578 | if i > 0: |
|
581 | 579 | p.text(',') |
|
582 | 580 | p.breakable() |
|
583 | 581 | p.pretty(field) |
|
584 | 582 | p.end_group(7, '])') |
|
585 | 583 | """ |
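To use the ``dtype_pprinter`` sketched above in a running shell, it would be registered with this formatter; note that printers for the plain-text formatter follow the ``(obj, p, cycle)`` signature of :mod:`IPython.lib.pretty` (``ip`` is assumed to be ``get_ipython()``):

    text = ip.display_formatter.formatters['text/plain']
    # deferred: the printer is only attached once a numpy.dtype is displayed
    text.for_type_by_name('numpy', 'dtype', dtype_pprinter)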
|
586 | 584 | |
|
587 | 585 | # The format type of data returned. |
|
588 | 586 | format_type = Unicode('text/plain') |
|
589 | 587 | |
|
590 | 588 | # This subclass ignores this attribute as it always needs to return |
|
591 | 589 | # something. |
|
592 | 590 | enabled = Bool(True, config=False) |
|
593 | 591 | |
|
594 | 592 | # Look for a _repr_pretty_ method to use for pretty printing. |
|
595 | 593 | print_method = ObjectName('_repr_pretty_') |
|
596 | 594 | |
|
597 | 595 | # Whether to pretty-print or not. |
|
598 | 596 | pprint = Bool(True, config=True) |
|
599 | 597 | |
|
600 | 598 | # Whether to be verbose or not. |
|
601 | 599 | verbose = Bool(False, config=True) |
|
602 | 600 | |
|
603 | 601 | # The maximum width. |
|
604 | 602 | max_width = Integer(79, config=True) |
|
605 | 603 | |
|
606 | 604 | # The newline character. |
|
607 | 605 | newline = Unicode('\n', config=True) |
|
608 | 606 | |
|
609 | 607 | # format-string for pprinting floats |
|
610 | 608 | float_format = Unicode('%r') |
|
611 | 609 | # setter for float precision, either int or direct format-string |
|
612 | 610 | float_precision = CUnicode('', config=True) |
|
613 | 611 | |
|
614 | 612 | def _float_precision_changed(self, name, old, new): |
|
615 | 613 | """float_precision changed, set float_format accordingly. |
|
616 | 614 | |
|
617 | 615 | float_precision can be set by int or str. |
|
618 | 616 | This will set float_format, after interpreting input. |
|
619 | 617 | If numpy has been imported, numpy print precision will also be set. |
|
620 | 618 | |
|
621 | 619 | An integer `n` sets the format to '%.nf'; otherwise the value is used as the format string directly. |
|
622 | 620 | |
|
623 | 621 | An empty string resets to the defaults (repr for float, precision 8 for numpy). |
|
624 | 622 | |
|
625 | 623 | This parameter can be set via the '%precision' magic. |
|
626 | 624 | """ |
|
627 | 625 | |
|
628 | 626 | if '%' in new: |
|
629 | 627 | # got explicit format string |
|
630 | 628 | fmt = new |
|
631 | 629 | try: |
|
632 | 630 | fmt%3.14159 |
|
633 | 631 | except Exception: |
|
634 | 632 | raise ValueError("Precision must be int or format string, not %r"%new) |
|
635 | 633 | elif new: |
|
636 | 634 | # otherwise, should be an int |
|
637 | 635 | try: |
|
638 | 636 | i = int(new) |
|
639 | 637 | assert i >= 0 |
|
640 | 638 | except ValueError: |
|
641 | 639 | raise ValueError("Precision must be int or format string, not %r"%new) |
|
642 | 640 | except AssertionError: |
|
643 | 641 | raise ValueError("int precision must be non-negative, not %r"%i) |
|
644 | 642 | |
|
645 | 643 | fmt = '%%.%if'%i |
|
646 | 644 | if 'numpy' in sys.modules: |
|
647 | 645 | # set numpy precision if it has been imported |
|
648 | 646 | import numpy |
|
649 | 647 | numpy.set_printoptions(precision=i) |
|
650 | 648 | else: |
|
651 | 649 | # default back to repr |
|
652 | 650 | fmt = '%r' |
|
653 | 651 | if 'numpy' in sys.modules: |
|
654 | 652 | import numpy |
|
655 | 653 | # numpy default is 8 |
|
656 | 654 | numpy.set_printoptions(precision=8) |
|
657 | 655 | self.float_format = fmt |
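For example, the handler above produces the following (a sketch; the same values can also be set through the ``%precision`` magic):

    from IPython.core.formatters import PlainTextFormatter

    f = PlainTextFormatter()
    f.float_precision = 3        # float_format becomes '%.3f'
    f.float_precision = '%.2e'   # an explicit format string is used as-is
    f.float_precision = ''       # back to the default '%r'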
|
658 | 656 | |
|
659 | 657 | # Use the default pretty printers from IPython.lib.pretty. |
|
660 | 658 | def _singleton_printers_default(self): |
|
661 | 659 | return pretty._singleton_pprinters.copy() |
|
662 | 660 | |
|
663 | 661 | def _type_printers_default(self): |
|
664 | 662 | d = pretty._type_pprinters.copy() |
|
665 | 663 | d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) |
|
666 | 664 | return d |
|
667 | 665 | |
|
668 | 666 | def _deferred_printers_default(self): |
|
669 | 667 | return pretty._deferred_type_pprinters.copy() |
|
670 | 668 | |
|
671 | 669 | #### FormatterABC interface #### |
|
672 | 670 | |
|
673 | 671 | @warn_format_error |
|
674 | 672 | def __call__(self, obj): |
|
675 | 673 | """Compute the pretty representation of the object.""" |
|
676 | 674 | if not self.pprint: |
|
677 | 675 | return pretty._safe_repr(obj) |
|
678 | 676 | else: |
|
679 | 677 | # This uses StringIO, as cStringIO doesn't handle unicode. |
|
680 | 678 | stream = StringIO() |
|
681 | 679 | # self.newline.encode() is a quick fix for issue gh-597. We need to |
|
682 | 680 | # ensure that stream does not get a mix of unicode and bytestrings, |
|
683 | 681 | # or it will cause trouble. |
|
684 | 682 | printer = pretty.RepresentationPrinter(stream, self.verbose, |
|
685 | 683 | self.max_width, unicode_to_str(self.newline), |
|
686 | 684 | singleton_pprinters=self.singleton_printers, |
|
687 | 685 | type_pprinters=self.type_printers, |
|
688 | 686 | deferred_pprinters=self.deferred_printers) |
|
689 | 687 | printer.pretty(obj) |
|
690 | 688 | printer.flush() |
|
691 | 689 | return stream.getvalue() |
|
692 | 690 | |
|
693 | 691 | |
|
694 | 692 | class HTMLFormatter(BaseFormatter): |
|
695 | 693 | """An HTML formatter. |
|
696 | 694 | |
|
697 | 695 | To define the callables that compute the HTML representation of your |
|
698 | 696 | objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` |
|
699 | 697 | or :meth:`for_type_by_name` methods to register functions that handle |
|
700 | 698 | this. |
|
701 | 699 | |
|
702 | 700 | The return value of this formatter should be a valid HTML snippet that |
|
703 | 701 | could be injected into an existing DOM. It should *not* include the |
|
704 | 702 | ``<html>`` or ``<body>`` tags. |
|
705 | 703 | """ |
|
706 | 704 | format_type = Unicode('text/html') |
|
707 | 705 | |
|
708 | 706 | print_method = ObjectName('_repr_html_') |
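No registration is needed when the object itself provides the special method; an illustrative (made-up) class that the HTML formatter picks up on its own:

    class ProgressBar(object):
        # Toy object with an HTML representation (illustrative only).
        def __init__(self, fraction):
            self.fraction = fraction

        def _repr_html_(self):
            pct = int(self.fraction * 100)
            # an HTML snippet, no <html>/<body> wrapper
            return "<progress value='%d' max='100'></progress> %d%%" % (pct, pct)

Displaying ``ProgressBar(0.4)`` would then render through this formatter in HTML-capable frontends, alongside the usual plain-text repr.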
|
709 | 707 | |
|
710 | 708 | |
|
711 | 709 | class MarkdownFormatter(BaseFormatter): |
|
712 | 710 | """A Markdown formatter. |
|
713 | 711 | |
|
714 | 712 | To define the callables that compute the Markdown representation of your |
|
715 | 713 | objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` |
|
716 | 714 | or :meth:`for_type_by_name` methods to register functions that handle |
|
717 | 715 | this. |
|
718 | 716 | |
|
719 | 717 | The return value of this formatter should be valid Markdown. |
|
720 | 718 | """ |
|
721 | 719 | format_type = Unicode('text/markdown') |
|
722 | 720 | |
|
723 | 721 | print_method = ObjectName('_repr_markdown_') |
|
724 | 722 | |
|
725 | 723 | class SVGFormatter(BaseFormatter): |
|
726 | 724 | """An SVG formatter. |
|
727 | 725 | |
|
728 | 726 | To define the callables that compute the SVG representation of your |
|
729 | 727 | objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` |
|
730 | 728 | or :meth:`for_type_by_name` methods to register functions that handle |
|
731 | 729 | this. |
|
732 | 730 | |
|
733 | 731 | The return value of this formatter should be valid SVG enclosed in |
|
734 | 732 | ``<svg>`` tags, that could be injected into an existing DOM. It should |
|
735 | 733 | *not* include the ``<html>`` or ``<body>`` tags. |
|
736 | 734 | """ |
|
737 | 735 | format_type = Unicode('image/svg+xml') |
|
738 | 736 | |
|
739 | 737 | print_method = ObjectName('_repr_svg_') |
|
740 | 738 | |
|
741 | 739 | |
|
742 | 740 | class PNGFormatter(BaseFormatter): |
|
743 | 741 | """A PNG formatter. |
|
744 | 742 | |
|
745 | 743 | To define the callables that compute the PNG representation of your |
|
746 | 744 | objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` |
|
747 | 745 | or :meth:`for_type_by_name` methods to register functions that handle |
|
748 | 746 | this. |
|
749 | 747 | |
|
750 | 748 | The return value of this formatter should be raw PNG data, *not* |
|
751 | 749 | base64 encoded. |
|
752 | 750 | """ |
|
753 | 751 | format_type = Unicode('image/png') |
|
754 | 752 | |
|
755 | 753 | print_method = ObjectName('_repr_png_') |
|
756 | 754 | |
|
757 | 755 | _return_type = (bytes, unicode_type) |
|
758 | 756 | |
|
759 | 757 | |
|
760 | 758 | class JPEGFormatter(BaseFormatter): |
|
761 | 759 | """A JPEG formatter. |
|
762 | 760 | |
|
763 | 761 | To define the callables that compute the JPEG representation of your |
|
764 | 762 | objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` |
|
765 | 763 | or :meth:`for_type_by_name` methods to register functions that handle |
|
766 | 764 | this. |
|
767 | 765 | |
|
768 | 766 | The return value of this formatter should be raw JPEG data, *not* |
|
769 | 767 | base64 encoded. |
|
770 | 768 | """ |
|
771 | 769 | format_type = Unicode('image/jpeg') |
|
772 | 770 | |
|
773 | 771 | print_method = ObjectName('_repr_jpeg_') |
|
774 | 772 | |
|
775 | 773 | _return_type = (bytes, unicode_type) |
|
776 | 774 | |
|
777 | 775 | |
|
778 | 776 | class LatexFormatter(BaseFormatter): |
|
779 | 777 | """A LaTeX formatter. |
|
780 | 778 | |
|
781 | 779 | To define the callables that compute the LaTeX representation of your |
|
782 | 780 | objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` |
|
783 | 781 | or :meth:`for_type_by_name` methods to register functions that handle |
|
784 | 782 | this. |
|
785 | 783 | |
|
786 | 784 | The return value of this formatter should be a valid LaTeX equation, |
|
787 | 785 | enclosed in either ``$``, ``$$`` or another LaTeX equation |
|
788 | 786 | environment. |
|
789 | 787 | """ |
|
790 | 788 | format_type = Unicode('text/latex') |
|
791 | 789 | |
|
792 | 790 | print_method = ObjectName('_repr_latex_') |
|
793 | 791 | |
|
794 | 792 | |
|
795 | 793 | class JSONFormatter(BaseFormatter): |
|
796 | 794 | """A JSON string formatter. |
|
797 | 795 | |
|
798 | 796 | To define the callables that compute the JSON string representation of |
|
799 | 797 | your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` |
|
800 | 798 | or :meth:`for_type_by_name` methods to register functions that handle |
|
801 | 799 | this. |
|
802 | 800 | |
|
803 | 801 | The return value of this formatter should be a valid JSON string. |
|
804 | 802 | """ |
|
805 | 803 | format_type = Unicode('application/json') |
|
806 | 804 | |
|
807 | 805 | print_method = ObjectName('_repr_json_') |
|
808 | 806 | |
|
809 | 807 | |
|
810 | 808 | class JavascriptFormatter(BaseFormatter): |
|
811 | 809 | """A Javascript formatter. |
|
812 | 810 | |
|
813 | 811 | To define the callables that compute the Javascript representation of |
|
814 | 812 | your objects, define a :meth:`_repr_javascript_` method or use the |
|
815 | 813 | :meth:`for_type` or :meth:`for_type_by_name` methods to register functions |
|
816 | 814 | that handle this. |
|
817 | 815 | |
|
818 | 816 | The return value of this formatter should be valid Javascript code and |
|
819 | 817 | should *not* be enclosed in ``<script>`` tags. |
|
820 | 818 | """ |
|
821 | 819 | format_type = Unicode('application/javascript') |
|
822 | 820 | |
|
823 | 821 | print_method = ObjectName('_repr_javascript_') |
|
824 | 822 | |
|
825 | 823 | |
|
826 | 824 | class PDFFormatter(BaseFormatter): |
|
827 | 825 | """A PDF formatter. |
|
828 | 826 | |
|
829 | 827 | To define the callables that compute the PDF representation of your |
|
830 | 828 | objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` |
|
831 | 829 | or :meth:`for_type_by_name` methods to register functions that handle |
|
832 | 830 | this. |
|
833 | 831 | |
|
834 | 832 | The return value of this formatter should be raw PDF data, *not* |
|
835 | 833 | base64 encoded. |
|
836 | 834 | """ |
|
837 | 835 | format_type = Unicode('application/pdf') |
|
838 | 836 | |
|
839 | 837 | print_method = ObjectName('_repr_pdf_') |
|
840 | 838 | |
|
841 | 839 | |
|
842 | 840 | FormatterABC.register(BaseFormatter) |
|
843 | 841 | FormatterABC.register(PlainTextFormatter) |
|
844 | 842 | FormatterABC.register(HTMLFormatter) |
|
845 | 843 | FormatterABC.register(MarkdownFormatter) |
|
846 | 844 | FormatterABC.register(SVGFormatter) |
|
847 | 845 | FormatterABC.register(PNGFormatter) |
|
848 | 846 | FormatterABC.register(PDFFormatter) |
|
849 | 847 | FormatterABC.register(JPEGFormatter) |
|
850 | 848 | FormatterABC.register(LatexFormatter) |
|
851 | 849 | FormatterABC.register(JSONFormatter) |
|
852 | 850 | FormatterABC.register(JavascriptFormatter) |
|
853 | 851 | |
|
854 | 852 | |
|
855 | 853 | def format_display_data(obj, include=None, exclude=None): |
|
856 | 854 | """Return a format data dict for an object. |
|
857 | 855 | |
|
858 | 856 | By default all format types will be computed. |
|
859 | 857 | |
|
860 | 858 | The following MIME types are currently implemented: |
|
861 | 859 | |
|
862 | 860 | * text/plain |
|
863 | 861 | * text/html |
|
864 | 862 | * text/markdown |
|
865 | 863 | * text/latex |
|
866 | 864 | * application/json |
|
867 | 865 | * application/javascript |
|
868 | 866 | * application/pdf |
|
869 | 867 | * image/png |
|
870 | 868 | * image/jpeg |
|
871 | 869 | * image/svg+xml |
|
872 | 870 | |
|
873 | 871 | Parameters |
|
874 | 872 | ---------- |
|
875 | 873 | obj : object |
|
876 | 874 | The Python object whose format data will be computed. |
|
877 | 875 | |
|
878 | 876 | Returns |
|
879 | 877 | ------- |
|
880 | 878 | format_dict : dict |
|
881 | 879 | A dictionary of key/value pairs, one for each format that was |
|
882 | 880 | generated for the object. The keys are the format types, which |
|
883 | 881 | will usually be MIME type strings, and the values are JSON'able |
|
884 | 882 | data structures containing the raw data for the representation in |
|
885 | 883 | that format. |
|
886 | 884 | include : list or tuple, optional |
|
887 | 885 | A list of format type strings (MIME types) to include in the |
|
888 | 886 | format data dict. If this is set *only* the format types included |
|
889 | 887 | in this list will be computed. |
|
890 | 888 | exclude : list or tuple, optional |
|
891 | 889 | A list of format type strings (MIME types) to exclude from the format |
|
892 | 890 | data dict. If this is set all format types will be computed, |
|
893 | 891 | except for those included in this argument. |
|
894 | 892 | """ |
|
895 | 893 | from IPython.core.interactiveshell import InteractiveShell |
|
896 | 894 | |
|
897 | 895 | return InteractiveShell.instance().display_formatter.format( |
|
898 | 896 | obj, |
|
899 | 897 | include, |
|
900 | 898 | exclude |
|
901 | 899 | ) |
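Example call (assumes a running IPython session; with the ``return`` added above, the ``(data, metadata)`` pair from ``DisplayFormatter.format`` is passed through):

    from IPython.core.formatters import format_display_data

    data, metadata = format_display_data(3.14, include={'text/plain'})
    # data == {'text/plain': '3.14'}; metadata is empty for plain text here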
|
902 | 900 |
@@ -1,250 +1,249 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """An object for managing IPython profile directories.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | import os |
|
8 | 8 | import shutil |
|
9 | 9 | import errno |
|
10 | import time | |
|
11 | 10 | |
|
12 | 11 | from IPython.config.configurable import LoggingConfigurable |
|
13 | 12 | from IPython.utils.path import get_ipython_package_dir, expand_path, ensure_dir_exists |
|
14 | 13 | from IPython.utils import py3compat |
|
15 | 14 | from IPython.utils.traitlets import Unicode, Bool |
|
16 | 15 | |
|
17 | 16 | #----------------------------------------------------------------------------- |
|
18 | 17 | # Module errors |
|
19 | 18 | #----------------------------------------------------------------------------- |
|
20 | 19 | |
|
21 | 20 | class ProfileDirError(Exception): |
|
22 | 21 | pass |
|
23 | 22 | |
|
24 | 23 | |
|
25 | 24 | #----------------------------------------------------------------------------- |
|
26 | 25 | # Class for managing profile directories |
|
27 | 26 | #----------------------------------------------------------------------------- |
|
28 | 27 | |
|
29 | 28 | class ProfileDir(LoggingConfigurable): |
|
30 | 29 | """An object to manage the profile directory and its resources. |
|
31 | 30 | |
|
32 | 31 | The profile directory is used by all IPython applications, to manage |
|
33 | 32 | configuration, logging and security. |
|
34 | 33 | |
|
35 | 34 | This object knows how to find, create and manage these directories. This |
|
36 | 35 | should be used by any code that wants to handle profiles. |
|
37 | 36 | """ |
|
38 | 37 | |
|
39 | 38 | security_dir_name = Unicode('security') |
|
40 | 39 | log_dir_name = Unicode('log') |
|
41 | 40 | startup_dir_name = Unicode('startup') |
|
42 | 41 | pid_dir_name = Unicode('pid') |
|
43 | 42 | static_dir_name = Unicode('static') |
|
44 | 43 | security_dir = Unicode(u'') |
|
45 | 44 | log_dir = Unicode(u'') |
|
46 | 45 | startup_dir = Unicode(u'') |
|
47 | 46 | pid_dir = Unicode(u'') |
|
48 | 47 | static_dir = Unicode(u'') |
|
49 | 48 | |
|
50 | 49 | location = Unicode(u'', config=True, |
|
51 | 50 | help="""Set the profile location directly. This overrides the logic used by the |
|
52 | 51 | `profile` option.""", |
|
53 | 52 | ) |
|
54 | 53 | |
|
55 | 54 | _location_isset = Bool(False) # flag for detecting multiply set location |
|
56 | 55 | |
|
57 | 56 | def _location_changed(self, name, old, new): |
|
58 | 57 | if self._location_isset: |
|
59 | 58 | raise RuntimeError("Cannot set profile location more than once.") |
|
60 | 59 | self._location_isset = True |
|
61 | 60 | ensure_dir_exists(new) |
|
62 | 61 | |
|
63 | 62 | # ensure config files exist: |
|
64 | 63 | self.security_dir = os.path.join(new, self.security_dir_name) |
|
65 | 64 | self.log_dir = os.path.join(new, self.log_dir_name) |
|
66 | 65 | self.startup_dir = os.path.join(new, self.startup_dir_name) |
|
67 | 66 | self.pid_dir = os.path.join(new, self.pid_dir_name) |
|
68 | 67 | self.static_dir = os.path.join(new, self.static_dir_name) |
|
69 | 68 | self.check_dirs() |
|
70 | 69 | |
|
71 | 70 | def _log_dir_changed(self, name, old, new): |
|
72 | 71 | self.check_log_dir() |
|
73 | 72 | |
|
74 | 73 | def _mkdir(self, path, mode=None): |
|
75 | 74 | """ensure a directory exists at a given path |
|
76 | 75 | |
|
77 | 76 | This is a version of os.mkdir, with the following differences: |
|
78 | 77 | |
|
79 | 78 | - returns True if it created the directory, False otherwise |
|
80 | 79 | - ignores EEXIST, protecting against race conditions where |
|
81 | 80 | the dir may have been created in between the check and |
|
82 | 81 | the creation |
|
83 | 82 | - sets permissions if requested and the dir already exists |
|
84 | 83 | """ |
|
85 | 84 | if os.path.exists(path): |
|
86 | 85 | if mode and os.stat(path).st_mode != mode: |
|
87 | 86 | try: |
|
88 | 87 | os.chmod(path, mode) |
|
89 | 88 | except OSError: |
|
90 | 89 | self.log.warn( |
|
91 | 90 | "Could not set permissions on %s", |
|
92 | 91 | path |
|
93 | 92 | ) |
|
94 | 93 | return False |
|
95 | 94 | try: |
|
96 | 95 | if mode: |
|
97 | 96 | os.mkdir(path, mode) |
|
98 | 97 | else: |
|
99 | 98 | os.mkdir(path) |
|
100 | 99 | except OSError as e: |
|
101 | 100 | if e.errno == errno.EEXIST: |
|
102 | 101 | return False |
|
103 | 102 | else: |
|
104 | 103 | raise |
|
105 | 104 | |
|
106 | 105 | return True |
|
107 | 106 | |
|
108 | 107 | def check_log_dir(self): |
|
109 | 108 | self._mkdir(self.log_dir) |
|
110 | 109 | |
|
111 | 110 | def _startup_dir_changed(self, name, old, new): |
|
112 | 111 | self.check_startup_dir() |
|
113 | 112 | |
|
114 | 113 | def check_startup_dir(self): |
|
115 | 114 | self._mkdir(self.startup_dir) |
|
116 | 115 | |
|
117 | 116 | readme = os.path.join(self.startup_dir, 'README') |
|
118 | 117 | src = os.path.join(get_ipython_package_dir(), u'config', u'profile', u'README_STARTUP') |
|
119 | 118 | |
|
120 | 119 | if not os.path.exists(src): |
|
121 | 120 | self.log.warn("Could not copy README_STARTUP to startup dir. Source file %s does not exist.", src) |
|
122 | 121 | |
|
123 | 122 | if os.path.exists(src) and not os.path.exists(readme): |
|
124 | 123 | shutil.copy(src, readme) |
|
125 | 124 | |
|
126 | 125 | def _security_dir_changed(self, name, old, new): |
|
127 | 126 | self.check_security_dir() |
|
128 | 127 | |
|
129 | 128 | def check_security_dir(self): |
|
130 | 129 | self._mkdir(self.security_dir, 0o40700) |
|
131 | 130 | |
|
132 | 131 | def _pid_dir_changed(self, name, old, new): |
|
133 | 132 | self.check_pid_dir() |
|
134 | 133 | |
|
135 | 134 | def check_pid_dir(self): |
|
136 | 135 | self._mkdir(self.pid_dir, 0o40700) |
|
137 | 136 | |
|
138 | 137 | def _static_dir_changed(self, name, old, new): |
|
139 | 138 | self.check_startup_dir() |
|
140 | 139 | |
|
141 | 140 | def check_static_dir(self): |
|
142 | 141 | self._mkdir(self.static_dir) |
|
143 | 142 | custom = os.path.join(self.static_dir, 'custom') |
|
144 | 143 | self._mkdir(custom) |
|
145 | 144 | from IPython.html import DEFAULT_STATIC_FILES_PATH |
|
146 | 145 | for fname in ('custom.js', 'custom.css'): |
|
147 | 146 | src = os.path.join(DEFAULT_STATIC_FILES_PATH, 'custom', fname) |
|
148 | 147 | dest = os.path.join(custom, fname) |
|
149 | 148 | if not os.path.exists(src): |
|
150 | 149 | self.log.warn("Could not copy default file to static dir. Source file %s does not exist.", src) |
|
151 | 150 | continue |
|
152 | 151 | if not os.path.exists(dest): |
|
153 | 152 | shutil.copy(src, dest) |
|
154 | 153 | |
|
155 | 154 | def check_dirs(self): |
|
156 | 155 | self.check_security_dir() |
|
157 | 156 | self.check_log_dir() |
|
158 | 157 | self.check_pid_dir() |
|
159 | 158 | self.check_startup_dir() |
|
160 | 159 | self.check_static_dir() |
|
161 | 160 | |
|
162 | 161 | def copy_config_file(self, config_file, path=None, overwrite=False): |
|
163 | 162 | """Copy a default config file into the active profile directory. |
|
164 | 163 | |
|
165 | 164 | Default configuration files are kept in :mod:`IPython.config.default`. |
|
166 | 165 | This function copies these from that location to the working profile |
|
167 | 166 | directory. |
|
168 | 167 | """ |
|
169 | 168 | dst = os.path.join(self.location, config_file) |
|
170 | 169 | if os.path.isfile(dst) and not overwrite: |
|
171 | 170 | return False |
|
172 | 171 | if path is None: |
|
173 | 172 | path = os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') |
|
174 | 173 | src = os.path.join(path, config_file) |
|
175 | 174 | shutil.copy(src, dst) |
|
176 | 175 | return True |
|
177 | 176 | |
|
178 | 177 | @classmethod |
|
179 | 178 | def create_profile_dir(cls, profile_dir, config=None): |
|
180 | 179 | """Create a new profile directory given a full path. |
|
181 | 180 | |
|
182 | 181 | Parameters |
|
183 | 182 | ---------- |
|
184 | 183 | profile_dir : str |
|
185 | 184 | The full path to the profile directory. If it does exist, it will |
|
186 | 185 | be used. If not, it will be created. |
|
187 | 186 | """ |
|
188 | 187 | return cls(location=profile_dir, config=config) |
|
189 | 188 | |
|
190 | 189 | @classmethod |
|
191 | 190 | def create_profile_dir_by_name(cls, path, name=u'default', config=None): |
|
192 | 191 | """Create a profile dir by profile name and path. |
|
193 | 192 | |
|
194 | 193 | Parameters |
|
195 | 194 | ---------- |
|
196 | 195 | path : unicode |
|
197 | 196 | The path (directory) to put the profile directory in. |
|
198 | 197 | name : unicode |
|
199 | 198 | The name of the profile. The name of the profile directory will |
|
200 | 199 | be "profile_<profile>". |
|
201 | 200 | """ |
|
202 | 201 | if not os.path.isdir(path): |
|
203 | 202 | raise ProfileDirError('Directory not found: %s' % path) |
|
204 | 203 | profile_dir = os.path.join(path, u'profile_' + name) |
|
205 | 204 | return cls(location=profile_dir, config=config) |
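A small sketch of driving this class directly (a temporary directory keeps the example away from any real ipython_dir; names are illustrative):

    import tempfile
    from IPython.core.profiledir import ProfileDir

    base = tempfile.mkdtemp()
    pd = ProfileDir.create_profile_dir_by_name(base, u'demo')
    # pd.location is <base>/profile_demo; the security/, log/, startup/,
    # pid/ and static/ subdirectories are created when location is set
    found = ProfileDir.find_profile_dir_by_name(base, u'demo')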
|
206 | 205 | |
|
207 | 206 | @classmethod |
|
208 | 207 | def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None): |
|
209 | 208 | """Find an existing profile dir by profile name, return its ProfileDir. |
|
210 | 209 | |
|
211 | 210 | This searches through a sequence of paths for a profile dir. If it |
|
212 | 211 | is not found, a :class:`ProfileDirError` exception will be raised. |
|
213 | 212 | |
|
214 | 213 | The search path algorithm is: |
|
215 | 214 | 1. ``py3compat.getcwd()`` |
|
216 | 215 | 2. ``ipython_dir`` |
|
217 | 216 | |
|
218 | 217 | Parameters |
|
219 | 218 | ---------- |
|
220 | 219 | ipython_dir : unicode or str |
|
221 | 220 | The IPython directory to use. |
|
222 | 221 | name : unicode or str |
|
223 | 222 | The name of the profile. The name of the profile directory |
|
224 | 223 | will be "profile_<profile>". |
|
225 | 224 | """ |
|
226 | 225 | dirname = u'profile_' + name |
|
227 | 226 | paths = [py3compat.getcwd(), ipython_dir] |
|
228 | 227 | for p in paths: |
|
229 | 228 | profile_dir = os.path.join(p, dirname) |
|
230 | 229 | if os.path.isdir(profile_dir): |
|
231 | 230 | return cls(location=profile_dir, config=config) |
|
232 | 231 | else: |
|
233 | 232 | raise ProfileDirError('Profile directory not found in paths: %s' % dirname) |
|
234 | 233 | |
|
235 | 234 | @classmethod |
|
236 | 235 | def find_profile_dir(cls, profile_dir, config=None): |
|
237 | 236 | """Find/create a profile dir and return its ProfileDir. |
|
238 | 237 | |
|
239 | 238 | This will create the profile directory if it doesn't exist. |
|
240 | 239 | |
|
241 | 240 | Parameters |
|
242 | 241 | ---------- |
|
243 | 242 | profile_dir : unicode or str |
|
244 | 243 | The path of the profile directory. This is expanded using |
|
245 | 244 | :func:`IPython.utils.genutils.expand_path`. |
|
246 | 245 | """ |
|
247 | 246 | profile_dir = expand_path(profile_dir) |
|
248 | 247 | if not os.path.isdir(profile_dir): |
|
249 | 248 | raise ProfileDirError('Profile directory not found: %s' % profile_dir) |
|
250 | 249 | return cls(location=profile_dir, config=config) |
@@ -1,382 +1,381 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Pylab (matplotlib) support utilities. |
|
3 | 3 | |
|
4 | 4 | Authors |
|
5 | 5 | ------- |
|
6 | 6 | |
|
7 | 7 | * Fernando Perez. |
|
8 | 8 | * Brian Granger |
|
9 | 9 | """ |
|
10 | 10 | from __future__ import print_function |
|
11 | 11 | |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | # Copyright (C) 2009 The IPython Development Team |
|
14 | 14 | # |
|
15 | 15 | # Distributed under the terms of the BSD License. The full license is in |
|
16 | 16 | # the file COPYING, distributed as part of this software. |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | # Imports |
|
21 | 21 | #----------------------------------------------------------------------------- |
|
22 | 22 | |
|
23 | import sys | |
|
24 | 23 | from io import BytesIO |
|
25 | 24 | |
|
26 | 25 | from IPython.core.display import _pngxy |
|
27 | 26 | from IPython.utils.decorators import flag_calls |
|
28 | 27 | from IPython.utils import py3compat |
|
29 | 28 | |
|
30 | 29 | # If user specifies a GUI, that dictates the backend, otherwise we read the |
|
31 | 30 | # user's mpl default from the mpl rc structure |
|
32 | 31 | backends = {'tk': 'TkAgg', |
|
33 | 32 | 'gtk': 'GTKAgg', |
|
34 | 33 | 'gtk3': 'GTK3Agg', |
|
35 | 34 | 'wx': 'WXAgg', |
|
36 | 35 | 'qt': 'Qt4Agg', # qt3 not supported |
|
37 | 36 | 'qt4': 'Qt4Agg', |
|
38 | 37 | 'osx': 'MacOSX', |
|
39 | 38 | 'inline' : 'module://IPython.kernel.zmq.pylab.backend_inline'} |
|
40 | 39 | |
|
41 | 40 | # We also need a reverse backends2guis mapping that will properly choose which |
|
42 | 41 | # GUI support to activate based on the desired matplotlib backend. For the |
|
43 | 42 | # most part it's just a reverse of the above dict, but we also need to add a |
|
44 | 43 | # few others that map to the same GUI manually: |
|
45 | 44 | backend2gui = dict(zip(backends.values(), backends.keys())) |
|
46 | 45 | # Our tests expect backend2gui to just return 'qt' |
|
47 | 46 | backend2gui['Qt4Agg'] = 'qt' |
|
48 | 47 | # In the reverse mapping, there are a few extra valid matplotlib backends that |
|
49 | 48 | # map to the same GUI support |
|
50 | 49 | backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk' |
|
51 | 50 | backend2gui['GTK3Cairo'] = 'gtk3' |
|
52 | 51 | backend2gui['WX'] = 'wx' |
|
53 | 52 | backend2gui['CocoaAgg'] = 'osx' |
|
54 | 53 | |
|
55 | 54 | #----------------------------------------------------------------------------- |
|
56 | 55 | # Matplotlib utilities |
|
57 | 56 | #----------------------------------------------------------------------------- |
|
58 | 57 | |
|
59 | 58 | |
|
60 | 59 | def getfigs(*fig_nums): |
|
61 | 60 | """Get a list of matplotlib figures by figure numbers. |
|
62 | 61 | |
|
63 | 62 | If no arguments are given, all available figures are returned. If the |
|
64 | 63 | argument list contains references to invalid figures, a warning is printed |
|
65 | 64 | but the function continues processing the remaining figures. |
|
66 | 65 | |
|
67 | 66 | Parameters |
|
68 | 67 | ---------- |
|
69 | 68 | fig_nums : tuple |
|
70 | 69 | A tuple of ints giving the figure numbers of the figures to return. |
|
71 | 70 | """ |
|
72 | 71 | from matplotlib._pylab_helpers import Gcf |
|
73 | 72 | if not fig_nums: |
|
74 | 73 | fig_managers = Gcf.get_all_fig_managers() |
|
75 | 74 | return [fm.canvas.figure for fm in fig_managers] |
|
76 | 75 | else: |
|
77 | 76 | figs = [] |
|
78 | 77 | for num in fig_nums: |
|
79 | 78 | f = Gcf.figs.get(num) |
|
80 | 79 | if f is None: |
|
81 | 80 | print('Warning: figure %s not available.' % num) |
|
82 | 81 | else: |
|
83 | 82 | figs.append(f.canvas.figure) |
|
84 | 83 | return figs |
|
85 | 84 | |
|
86 | 85 | |
|
87 | 86 | def figsize(sizex, sizey): |
|
88 | 87 | """Set the default figure size to be [sizex, sizey]. |
|
89 | 88 | |
|
90 | 89 | This is just an easy to remember, convenience wrapper that sets:: |
|
91 | 90 | |
|
92 | 91 | matplotlib.rcParams['figure.figsize'] = [sizex, sizey] |
|
93 | 92 | """ |
|
94 | 93 | import matplotlib |
|
95 | 94 | matplotlib.rcParams['figure.figsize'] = [sizex, sizey] |
|
96 | 95 | |
|
97 | 96 | |
|
98 | 97 | def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs): |
|
99 | 98 | """Print a figure to an image, and return the resulting file data |
|
100 | 99 | |
|
101 | 100 | Returned data will be bytes unless ``fmt='svg'``, |
|
102 | 101 | in which case it will be unicode. |
|
103 | 102 | |
|
104 | 103 | Any keyword args are passed to fig.canvas.print_figure, |
|
105 | 104 | such as ``quality`` or ``bbox_inches``. |
|
106 | 105 | """ |
|
107 | 106 | from matplotlib import rcParams |
|
108 | 107 | # When there's an empty figure, we shouldn't return anything, otherwise we |
|
109 | 108 | # get big blank areas in the qt console. |
|
110 | 109 | if not fig.axes and not fig.lines: |
|
111 | 110 | return |
|
112 | 111 | |
|
113 | 112 | dpi = rcParams['savefig.dpi'] |
|
114 | 113 | if fmt == 'retina': |
|
115 | 114 | dpi = dpi * 2 |
|
116 | 115 | fmt = 'png' |
|
117 | 116 | |
|
118 | 117 | # build keyword args |
|
119 | 118 | kw = dict( |
|
120 | 119 | format=fmt, |
|
121 | 120 | facecolor=fig.get_facecolor(), |
|
122 | 121 | edgecolor=fig.get_edgecolor(), |
|
123 | 122 | dpi=dpi, |
|
124 | 123 | bbox_inches=bbox_inches, |
|
125 | 124 | ) |
|
126 | 125 | # **kwargs get higher priority |
|
127 | 126 | kw.update(kwargs) |
|
128 | 127 | |
|
129 | 128 | bytes_io = BytesIO() |
|
130 | 129 | fig.canvas.print_figure(bytes_io, **kw) |
|
131 | 130 | data = bytes_io.getvalue() |
|
132 | 131 | if fmt == 'svg': |
|
133 | 132 | data = data.decode('utf-8') |
|
134 | 133 | return data |
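Usage sketch (requires matplotlib; the Agg backend keeps the example non-interactive):

    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    from IPython.core.pylabtools import print_figure

    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    png_bytes = print_figure(fig, fmt='png')   # raw PNG bytes
    svg_text = print_figure(fig, fmt='svg')    # unicode SVG markup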
|
135 | 134 | |
|
136 | 135 | def retina_figure(fig, **kwargs): |
|
137 | 136 | """format a figure as a pixel-doubled (retina) PNG""" |
|
138 | 137 | pngdata = print_figure(fig, fmt='retina', **kwargs) |
|
139 | 138 | w, h = _pngxy(pngdata) |
|
140 | 139 | metadata = dict(width=w//2, height=h//2) |
|
141 | 140 | return pngdata, metadata |
|
142 | 141 | |
|
143 | 142 | # We need a little factory function here to create the closure where |
|
144 | 143 | # safe_execfile can live. |
|
145 | 144 | def mpl_runner(safe_execfile): |
|
146 | 145 | """Factory to return a matplotlib-enabled runner for %run. |
|
147 | 146 | |
|
148 | 147 | Parameters |
|
149 | 148 | ---------- |
|
150 | 149 | safe_execfile : function |
|
151 | 150 | This must be a function with the same interface as the |
|
152 | 151 | :meth:`safe_execfile` method of IPython. |
|
153 | 152 | |
|
154 | 153 | Returns |
|
155 | 154 | ------- |
|
156 | 155 | A function suitable for use as the ``runner`` argument of the %run magic |
|
157 | 156 | function. |
|
158 | 157 | """ |
|
159 | 158 | |
|
160 | 159 | def mpl_execfile(fname,*where,**kw): |
|
161 | 160 | """matplotlib-aware wrapper around safe_execfile. |
|
162 | 161 | |
|
163 | 162 | Its interface is identical to that of the :func:`execfile` builtin. |
|
164 | 163 | |
|
165 | 164 | This is ultimately a call to execfile(), but wrapped in safeties to |
|
166 | 165 | properly handle interactive rendering.""" |
|
167 | 166 | |
|
168 | 167 | import matplotlib |
|
169 | 168 | import matplotlib.pylab as pylab |
|
170 | 169 | |
|
171 | 170 | #print '*** Matplotlib runner ***' # dbg |
|
172 | 171 | # turn off rendering until end of script |
|
173 | 172 | is_interactive = matplotlib.rcParams['interactive'] |
|
174 | 173 | matplotlib.interactive(False) |
|
175 | 174 | safe_execfile(fname,*where,**kw) |
|
176 | 175 | matplotlib.interactive(is_interactive) |
|
177 | 176 | # make rendering call now, if the user tried to do it |
|
178 | 177 | if pylab.draw_if_interactive.called: |
|
179 | 178 | pylab.draw() |
|
180 | 179 | pylab.draw_if_interactive.called = False |
|
181 | 180 | |
|
182 | 181 | return mpl_execfile |
|
183 | 182 | |
|
184 | 183 | |
|
185 | 184 | def select_figure_formats(shell, formats, **kwargs): |
|
186 | 185 | """Select figure formats for the inline backend. |
|
187 | 186 | |
|
188 | 187 | Parameters |
|
189 | 188 | ---------- |
|
190 | 189 | shell : InteractiveShell |
|
191 | 190 | The main IPython instance. |
|
192 | 191 | formats : str or set |
|
193 | 192 | One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. |
|
194 | 193 | **kwargs : any |
|
195 | 194 | Extra keyword arguments to be passed to fig.canvas.print_figure. |
|
196 | 195 | """ |
|
197 | 196 | from matplotlib.figure import Figure |
|
198 | 197 | from IPython.kernel.zmq.pylab import backend_inline |
|
199 | 198 | |
|
200 | 199 | svg_formatter = shell.display_formatter.formatters['image/svg+xml'] |
|
201 | 200 | png_formatter = shell.display_formatter.formatters['image/png'] |
|
202 | 201 | jpg_formatter = shell.display_formatter.formatters['image/jpeg'] |
|
203 | 202 | pdf_formatter = shell.display_formatter.formatters['application/pdf'] |
|
204 | 203 | |
|
205 | 204 | if isinstance(formats, py3compat.string_types): |
|
206 | 205 | formats = {formats} |
|
207 | 206 | # cast in case of list / tuple |
|
208 | 207 | formats = set(formats) |
|
209 | 208 | |
|
210 | 209 | [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ] |
|
211 | 210 | |
|
212 | 211 | supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'} |
|
213 | 212 | bad = formats.difference(supported) |
|
214 | 213 | if bad: |
|
215 | 214 | bs = "%s" % ','.join([repr(f) for f in bad]) |
|
216 | 215 | gs = "%s" % ','.join([repr(f) for f in supported]) |
|
217 | 216 | raise ValueError("supported formats are: %s not %s" % (gs, bs)) |
|
218 | 217 | |
|
219 | 218 | if 'png' in formats: |
|
220 | 219 | png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs)) |
|
221 | 220 | if 'retina' in formats or 'png2x' in formats: |
|
222 | 221 | png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs)) |
|
223 | 222 | if 'jpg' in formats or 'jpeg' in formats: |
|
224 | 223 | jpg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'jpg', **kwargs)) |
|
225 | 224 | if 'svg' in formats: |
|
226 | 225 | svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg', **kwargs)) |
|
227 | 226 | if 'pdf' in formats: |
|
228 | 227 | pdf_formatter.for_type(Figure, lambda fig: print_figure(fig, 'pdf', **kwargs)) |
|
229 | 228 | |
|
230 | 229 | #----------------------------------------------------------------------------- |
|
231 | 230 | # Code for initializing matplotlib and importing pylab |
|
232 | 231 | #----------------------------------------------------------------------------- |
|
233 | 232 | |
|
234 | 233 | |
|
235 | 234 | def find_gui_and_backend(gui=None, gui_select=None): |
|
236 | 235 | """Given a gui string return the gui and mpl backend. |
|
237 | 236 | |
|
238 | 237 | Parameters |
|
239 | 238 | ---------- |
|
240 | 239 | gui : str |
|
241 | 240 | Can be one of ('tk','gtk','wx','qt','qt4','inline'). |
|
242 | 241 | gui_select : str |
|
243 | 242 | Can be one of ('tk','gtk','wx','qt','qt4','inline'). |
|
244 | 243 | This is any gui already selected by the shell. |
|
245 | 244 | |
|
246 | 245 | Returns |
|
247 | 246 | ------- |
|
248 | 247 | A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg', |
|
249 | 248 | 'WXAgg','Qt4Agg','module://IPython.kernel.zmq.pylab.backend_inline'). |
|
250 | 249 | """ |
|
251 | 250 | |
|
252 | 251 | import matplotlib |
|
253 | 252 | |
|
254 | 253 | if gui and gui != 'auto': |
|
255 | 254 | # select backend based on requested gui |
|
256 | 255 | backend = backends[gui] |
|
257 | 256 | else: |
|
258 | 257 | # We need to read the backend from the original data structure, *not* |
|
259 | 258 | # from mpl.rcParams, since a prior invocation of %matplotlib may have |
|
260 | 259 | # overwritten that. |
|
261 | 260 | # WARNING: this assumes matplotlib 1.1 or newer!! |
|
262 | 261 | backend = matplotlib.rcParamsOrig['backend'] |
|
263 | 262 | # In this case, we need to find what the appropriate gui selection call |
|
264 | 263 | # should be for IPython, so we can activate inputhook accordingly |
|
265 | 264 | gui = backend2gui.get(backend, None) |
|
266 | 265 | |
|
267 | 266 | # If we have already had a gui active, then it and 'inline' are the |
|
268 | 267 | # only ones allowed, so fall back to the gui that is already active. |
|
269 | 268 | if gui_select and gui != gui_select: |
|
270 | 269 | gui = gui_select |
|
271 | 270 | backend = backends[gui] |
|
272 | 271 | |
|
273 | 272 | return gui, backend |
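For example (results on a given machine depend on the installed matplotlib and its rc file):

    from IPython.core.pylabtools import find_gui_and_backend

    find_gui_and_backend('qt')      # ('qt', 'Qt4Agg')
    find_gui_and_backend('inline')  # ('inline', 'module://IPython.kernel.zmq.pylab.backend_inline')
    find_gui_and_backend()          # gui=None: fall back to the user's default backend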
|
274 | 273 | |
|
275 | 274 | |
|
276 | 275 | def activate_matplotlib(backend): |
|
277 | 276 | """Activate the given backend and set interactive to True.""" |
|
278 | 277 | |
|
279 | 278 | import matplotlib |
|
280 | 279 | matplotlib.interactive(True) |
|
281 | 280 | |
|
282 | 281 | # Matplotlib had a bug where even switch_backend could not force |
|
283 | 282 | # the rcParam to update. This needs to be set *before* the module |
|
284 | 283 | # magic of switch_backend(). |
|
285 | 284 | matplotlib.rcParams['backend'] = backend |
|
286 | 285 | |
|
287 | 286 | import matplotlib.pyplot |
|
288 | 287 | matplotlib.pyplot.switch_backend(backend) |
|
289 | 288 | |
|
290 | 289 | # This must be imported last in the matplotlib series, after |
|
291 | 290 | # backend/interactivity choices have been made |
|
292 | 291 | import matplotlib.pylab as pylab |
|
293 | 292 | |
|
294 | 293 | pylab.show._needmain = False |
|
295 | 294 | # We need to detect at runtime whether show() is called by the user. |
|
296 | 295 | # For this, we wrap it into a decorator which adds a 'called' flag. |
|
297 | 296 | pylab.draw_if_interactive = flag_calls(pylab.draw_if_interactive) |
|
298 | 297 | |
|
299 | 298 | |
|
300 | 299 | def import_pylab(user_ns, import_all=True): |
|
301 | 300 | """Populate the namespace with pylab-related values. |
|
302 | 301 | |
|
303 | 302 | Imports matplotlib, pylab, numpy, and everything from pylab and numpy. |
|
304 | 303 | |
|
305 | 304 | Also imports a few names from IPython (figsize, display, getfigs) |
|
306 | 305 | |
|
307 | 306 | """ |
|
308 | 307 | |
|
309 | 308 | # Importing numpy as np and pyplot as plt are conventions we're trying to |
|
310 | 309 | # somewhat standardize on. Making them available to users by default |
|
311 | 310 | # will greatly help this. |
|
312 | 311 | s = ("import numpy\n" |
|
313 | 312 | "import matplotlib\n" |
|
314 | 313 | "from matplotlib import pylab, mlab, pyplot\n" |
|
315 | 314 | "np = numpy\n" |
|
316 | 315 | "plt = pyplot\n" |
|
317 | 316 | ) |
|
318 | 317 | exec(s, user_ns) |
|
319 | 318 | |
|
320 | 319 | if import_all: |
|
321 | 320 | s = ("from matplotlib.pylab import *\n" |
|
322 | 321 | "from numpy import *\n") |
|
323 | 322 | exec(s, user_ns) |
|
324 | 323 | |
|
325 | 324 | # IPython symbols to add |
|
326 | 325 | user_ns['figsize'] = figsize |
|
327 | 326 | from IPython.core.display import display |
|
328 | 327 | # Add display and getfigs to the user's namespace |
|
329 | 328 | user_ns['display'] = display |
|
330 | 329 | user_ns['getfigs'] = getfigs |
|
331 | 330 | |
|
332 | 331 | |
|
333 | 332 | def configure_inline_support(shell, backend): |
|
334 | 333 | """Configure an IPython shell object for matplotlib use. |
|
335 | 334 | |
|
336 | 335 | Parameters |
|
337 | 336 | ---------- |
|
338 | 337 | shell : InteractiveShell instance |
|
339 | 338 | |
|
340 | 339 | backend : matplotlib backend |
|
341 | 340 | """ |
|
342 | 341 | # If using our svg payload backend, register the post-execution |
|
343 | 342 | # function that will pick up the results for display. This can only be |
|
344 | 343 | # done with access to the real shell object. |
|
345 | 344 | |
|
346 | 345 | # Note: if we can't load the inline backend, then there's no point |
|
347 | 346 | # continuing (such as in terminal-only shells in environments without |
|
348 | 347 | # zeromq available). |
|
349 | 348 | try: |
|
350 | 349 | from IPython.kernel.zmq.pylab.backend_inline import InlineBackend |
|
351 | 350 | except ImportError: |
|
352 | 351 | return |
|
353 | 352 | from matplotlib import pyplot |
|
354 | 353 | |
|
355 | 354 | cfg = InlineBackend.instance(parent=shell) |
|
356 | 355 | cfg.shell = shell |
|
357 | 356 | if cfg not in shell.configurables: |
|
358 | 357 | shell.configurables.append(cfg) |
|
359 | 358 | |
|
360 | 359 | if backend == backends['inline']: |
|
361 | 360 | from IPython.kernel.zmq.pylab.backend_inline import flush_figures |
|
362 | 361 | shell.events.register('post_execute', flush_figures) |
|
363 | 362 | |
|
364 | 363 | # Save rcParams that will be overwritten |
|
365 | 364 | shell._saved_rcParams = dict() |
|
366 | 365 | for k in cfg.rc: |
|
367 | 366 | shell._saved_rcParams[k] = pyplot.rcParams[k] |
|
368 | 367 | # load inline_rc |
|
369 | 368 | pyplot.rcParams.update(cfg.rc) |
|
370 | 369 | else: |
|
371 | 370 | from IPython.kernel.zmq.pylab.backend_inline import flush_figures |
|
372 | 371 | try: |
|
373 | 372 | shell.events.unregister('post_execute', flush_figures) |
|
374 | 373 | except ValueError: |
|
375 | 374 | pass |
|
376 | 375 | if hasattr(shell, '_saved_rcParams'): |
|
377 | 376 | pyplot.rcParams.update(shell._saved_rcParams) |
|
378 | 377 | del shell._saved_rcParams |
|
379 | 378 | |
|
380 | 379 | # Setup the default figure format |
|
381 | 380 | select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs) |
|
382 | 381 |
@@ -1,438 +1,438 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | A mixin for :class:`~IPython.core.application.Application` classes that |
|
4 | 4 | launch InteractiveShell instances, load extensions, etc. |
|
5 | 5 | |
|
6 | 6 | Authors |
|
7 | 7 | ------- |
|
8 | 8 | |
|
9 | 9 | * Min Ragan-Kelley |
|
10 | 10 | """ |
|
11 | 11 | |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | # Copyright (C) 2008-2011 The IPython Development Team |
|
14 | 14 | # |
|
15 | 15 | # Distributed under the terms of the BSD License. The full license is in |
|
16 | 16 | # the file COPYING, distributed as part of this software. |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | # Imports |
|
21 | 21 | #----------------------------------------------------------------------------- |
|
22 | 22 | |
|
23 | 23 | from __future__ import absolute_import |
|
24 | 24 | from __future__ import print_function |
|
25 | 25 | |
|
26 | 26 | import glob |
|
27 | 27 | import os |
|
28 | 28 | import sys |
|
29 | 29 | |
|
30 | 30 | from IPython.config.application import boolean_flag |
|
31 | 31 | from IPython.config.configurable import Configurable |
|
32 | 32 | from IPython.config.loader import Config |
|
33 | 33 | from IPython.core import pylabtools |
|
34 | 34 | from IPython.utils import py3compat |
|
35 | 35 | from IPython.utils.contexts import preserve_keys |
|
36 | 36 | from IPython.utils.path import filefind |
|
37 | 37 | from IPython.utils.traitlets import ( |
|
38 | Unicode, Instance, List, Bool, CaselessStrEnum |
|
38 | Unicode, Instance, List, Bool, CaselessStrEnum | |
|
39 | 39 | ) |
|
40 | 40 | from IPython.lib.inputhook import guis |
|
41 | 41 | |
|
42 | 42 | #----------------------------------------------------------------------------- |
|
43 | 43 | # Aliases and Flags |
|
44 | 44 | #----------------------------------------------------------------------------- |
|
45 | 45 | |
|
46 | 46 | gui_keys = tuple(sorted([ key for key in guis if key is not None ])) |
|
47 | 47 | |
|
48 | 48 | backend_keys = sorted(pylabtools.backends.keys()) |
|
49 | 49 | backend_keys.insert(0, 'auto') |
|
50 | 50 | |
|
51 | 51 | shell_flags = {} |
|
52 | 52 | |
|
53 | 53 | addflag = lambda *args: shell_flags.update(boolean_flag(*args)) |
|
54 | 54 | addflag('autoindent', 'InteractiveShell.autoindent', |
|
55 | 55 | 'Turn on autoindenting.', 'Turn off autoindenting.' |
|
56 | 56 | ) |
|
57 | 57 | addflag('automagic', 'InteractiveShell.automagic', |
|
58 | 58 | """Turn on the auto calling of magic commands. Type %%magic at the |
|
59 | 59 | IPython prompt for more information.""", |
|
60 | 60 | 'Turn off the auto calling of magic commands.' |
|
61 | 61 | ) |
|
62 | 62 | addflag('pdb', 'InteractiveShell.pdb', |
|
63 | 63 | "Enable auto calling the pdb debugger after every exception.", |
|
64 | 64 | "Disable auto calling the pdb debugger after every exception." |
|
65 | 65 | ) |
|
66 | 66 | # pydb flag doesn't do any config, as core.debugger switches on import, |
|
67 | 67 | # which is before parsing. This just allows the flag to be passed. |
|
68 | 68 | shell_flags.update(dict( |
|
69 | 69 | pydb = ({}, |
|
70 | 70 | """Use the third party 'pydb' package as debugger, instead of pdb. |
|
71 | 71 | Requires that pydb is installed.""" |
|
72 | 72 | ) |
|
73 | 73 | )) |
|
74 | 74 | addflag('pprint', 'PlainTextFormatter.pprint', |
|
75 | 75 | "Enable auto pretty printing of results.", |
|
76 | 76 | "Disable auto pretty printing of results." |
|
77 | 77 | ) |
|
78 | 78 | addflag('color-info', 'InteractiveShell.color_info', |
|
79 | 79 | """IPython can display information about objects via a set of func- |
|
80 | 80 | tions, and optionally can use colors for this, syntax highlighting |
|
81 | 81 | source code and various other elements. However, because this |
|
82 | 82 | information is passed through a pager (like 'less') and many pagers get |
|
83 | 83 | confused with color codes, this option is off by default. You can test |
|
84 | 84 | it and turn it on permanently in your ipython_config.py file if it |
|
85 | 85 | works well with your pager. The magic function %%color_info allows |
|
86 | 86 | you to toggle this |
|
87 | 87 | interactively for testing.""", |
|
88 | 88 | "Disable using colors for info related things." |
|
89 | 89 | ) |
|
90 | 90 | addflag('deep-reload', 'InteractiveShell.deep_reload', |
|
91 | 91 | """Enable deep (recursive) reloading by default. IPython can use the |
|
92 | 92 | deep_reload module which reloads changes in modules recursively (it |
|
93 | 93 | replaces the reload() function, so you don't need to change anything to |
|
94 | 94 | use it). deep_reload() forces a full reload of modules whose code may |
|
95 | 95 | have changed, which the default reload() function does not. When |
|
96 | 96 | deep_reload is off, IPython will use the normal reload(), but |
|
97 | 97 | deep_reload will still be available as dreload(). This feature is off |
|
98 | 98 | by default [which means that you have both normal reload() and |
|
99 | 99 | dreload()].""", |
|
100 | 100 | "Disable deep (recursive) reloading by default." |
|
101 | 101 | ) |
|
102 | 102 | nosep_config = Config() |
|
103 | 103 | nosep_config.InteractiveShell.separate_in = '' |
|
104 | 104 | nosep_config.InteractiveShell.separate_out = '' |
|
105 | 105 | nosep_config.InteractiveShell.separate_out2 = '' |
|
106 | 106 | |
|
107 | 107 | shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.") |
|
108 | 108 | shell_flags['pylab'] = ( |
|
109 | 109 | {'InteractiveShellApp' : {'pylab' : 'auto'}}, |
|
110 | 110 | """Pre-load matplotlib and numpy for interactive use with |
|
111 | 111 | the default matplotlib backend.""" |
|
112 | 112 | ) |
|
113 | 113 | shell_flags['matplotlib'] = ( |
|
114 | 114 | {'InteractiveShellApp' : {'matplotlib' : 'auto'}}, |
|
115 | 115 | """Configure matplotlib for interactive use with |
|
116 | 116 | the default matplotlib backend.""" |
|
117 | 117 | ) |
|
118 | 118 | |
|
119 | 119 | # it's possible we don't want short aliases for *all* of these: |
|
120 | 120 | shell_aliases = dict( |
|
121 | 121 | autocall='InteractiveShell.autocall', |
|
122 | 122 | colors='InteractiveShell.colors', |
|
123 | 123 | logfile='InteractiveShell.logfile', |
|
124 | 124 | logappend='InteractiveShell.logappend', |
|
125 | 125 | c='InteractiveShellApp.code_to_run', |
|
126 | 126 | m='InteractiveShellApp.module_to_run', |
|
127 | 127 | ext='InteractiveShellApp.extra_extension', |
|
128 | 128 | gui='InteractiveShellApp.gui', |
|
129 | 129 | pylab='InteractiveShellApp.pylab', |
|
130 | 130 | matplotlib='InteractiveShellApp.matplotlib', |
|
131 | 131 | ) |
|
132 | 132 | shell_aliases['cache-size'] = 'InteractiveShell.cache_size' |
|
133 | 133 | |
|
134 | 134 | #----------------------------------------------------------------------------- |
|
135 | 135 | # Main classes and functions |
|
136 | 136 | #----------------------------------------------------------------------------- |
|
137 | 137 | |
|
138 | 138 | class InteractiveShellApp(Configurable): |
|
139 | 139 | """A Mixin for applications that start InteractiveShell instances. |
|
140 | 140 | |
|
141 | 141 | Provides configurables for loading extensions and executing files |
|
142 | 142 | as part of configuring a Shell environment. |
|
143 | 143 | |
|
144 | 144 | The following methods should be called by the :meth:`initialize` method |
|
145 | 145 | of the subclass: |
|
146 | 146 | |
|
147 | 147 | - :meth:`init_path` |
|
148 | 148 | - :meth:`init_shell` (to be implemented by the subclass) |
|
149 | 149 | - :meth:`init_gui_pylab` |
|
150 | 150 | - :meth:`init_extensions` |
|
151 | 151 | - :meth:`init_code` |
|
152 | 152 | """ |
|
153 | 153 | extensions = List(Unicode, config=True, |
|
154 | 154 | help="A list of dotted module names of IPython extensions to load." |
|
155 | 155 | ) |
|
156 | 156 | extra_extension = Unicode('', config=True, |
|
157 | 157 | help="dotted module name of an IPython extension to load." |
|
158 | 158 | ) |
|
159 | 159 | def _extra_extension_changed(self, name, old, new): |
|
160 | 160 | if new: |
|
161 | 161 | # add to self.extensions |
|
162 | 162 | self.extensions.append(new) |
|
163 | 163 | |
|
164 | 164 | # Extensions that are always loaded (not configurable) |
|
165 | 165 | default_extensions = List(Unicode, [u'storemagic'], config=False) |
|
166 | 166 | |
|
167 | 167 | hide_initial_ns = Bool(True, config=True, |
|
168 | 168 | help="""Should variables loaded at startup (by startup files, exec_lines, etc.) |
|
169 | 169 | be hidden from tools like %who?""" |
|
170 | 170 | ) |
|
171 | 171 | |
|
172 | 172 | exec_files = List(Unicode, config=True, |
|
173 | 173 | help="""List of files to run at IPython startup.""" |
|
174 | 174 | ) |
|
175 | 175 | exec_PYTHONSTARTUP = Bool(True, config=True, |
|
176 | 176 | help="""Run the file referenced by the PYTHONSTARTUP environment |
|
177 | 177 | variable at IPython startup.""" |
|
178 | 178 | ) |
|
179 | 179 | file_to_run = Unicode('', config=True, |
|
180 | 180 | help="""A file to be run""") |
|
181 | 181 | |
|
182 | 182 | exec_lines = List(Unicode, config=True, |
|
183 | 183 | help="""lines of code to run at IPython startup.""" |
|
184 | 184 | ) |
|
185 | 185 | code_to_run = Unicode('', config=True, |
|
186 | 186 | help="Execute the given command string." |
|
187 | 187 | ) |
|
188 | 188 | module_to_run = Unicode('', config=True, |
|
189 | 189 | help="Run the module as a script." |
|
190 | 190 | ) |
|
191 | 191 | gui = CaselessStrEnum(gui_keys, config=True, |
|
192 | 192 | help="Enable GUI event loop integration with any of {0}.".format(gui_keys) |
|
193 | 193 | ) |
|
194 | 194 | matplotlib = CaselessStrEnum(backend_keys, |
|
195 | 195 | config=True, |
|
196 | 196 | help="""Configure matplotlib for interactive use with |
|
197 | 197 | the default matplotlib backend.""" |
|
198 | 198 | ) |
|
199 | 199 | pylab = CaselessStrEnum(backend_keys, |
|
200 | 200 | config=True, |
|
201 | 201 | help="""Pre-load matplotlib and numpy for interactive use, |
|
202 | 202 | selecting a particular matplotlib backend and loop integration. |
|
203 | 203 | """ |
|
204 | 204 | ) |
|
205 | 205 | pylab_import_all = Bool(True, config=True, |
|
206 | 206 | help="""If true, IPython will populate the user namespace with numpy, pylab, etc. |
|
207 | 207 | and an ``import *`` is done from numpy and pylab, when using pylab mode. |
|
208 | 208 | |
|
209 | 209 | When False, pylab mode should not import any names into the user namespace. |
|
210 | 210 | """ |
|
211 | 211 | ) |
|
212 | 212 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC') |
|
213 | 213 | |
|
214 | 214 | user_ns = Instance(dict, args=None, allow_none=True) |
|
215 | 215 | def _user_ns_changed(self, name, old, new): |
|
216 | 216 | if self.shell is not None: |
|
217 | 217 | self.shell.user_ns = new |
|
218 | 218 | self.shell.init_user_ns() |
|
219 | 219 | |
|
220 | 220 | def init_path(self): |
|
221 | 221 | """Add current working directory, '', to sys.path""" |
|
222 | 222 | if sys.path[0] != '': |
|
223 | 223 | sys.path.insert(0, '') |
|
224 | 224 | |
|
225 | 225 | def init_shell(self): |
|
226 | 226 | raise NotImplementedError("Override in subclasses") |
|
227 | 227 | |
|
228 | 228 | def init_gui_pylab(self): |
|
229 | 229 | """Enable GUI event loop integration, taking pylab into account.""" |
|
230 | 230 | enable = False |
|
231 | 231 | shell = self.shell |
|
232 | 232 | if self.pylab: |
|
233 | 233 | enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all) |
|
234 | 234 | key = self.pylab |
|
235 | 235 | elif self.matplotlib: |
|
236 | 236 | enable = shell.enable_matplotlib |
|
237 | 237 | key = self.matplotlib |
|
238 | 238 | elif self.gui: |
|
239 | 239 | enable = shell.enable_gui |
|
240 | 240 | key = self.gui |
|
241 | 241 | |
|
242 | 242 | if not enable: |
|
243 | 243 | return |
|
244 | 244 | |
|
245 | 245 | try: |
|
246 | 246 | r = enable(key) |
|
247 | 247 | except ImportError: |
|
248 | 248 | self.log.warn("Eventloop or matplotlib integration failed. Is matplotlib installed?") |
|
249 | 249 | self.shell.showtraceback() |
|
250 | 250 | return |
|
251 | 251 | except Exception: |
|
252 | 252 | self.log.warn("GUI event loop or pylab initialization failed") |
|
253 | 253 | self.shell.showtraceback() |
|
254 | 254 | return |
|
255 | 255 | |
|
256 | 256 | if isinstance(r, tuple): |
|
257 | 257 | gui, backend = r[:2] |
|
258 | 258 | self.log.info("Enabling GUI event loop integration, " |
|
259 | 259 | "eventloop=%s, matplotlib=%s", gui, backend) |
|
260 | 260 | if key == "auto": |
|
261 | 261 | print("Using matplotlib backend: %s" % backend) |
|
262 | 262 | else: |
|
263 | 263 | gui = r |
|
264 | 264 | self.log.info("Enabling GUI event loop integration, " |
|
265 | 265 | "eventloop=%s", gui) |
|
266 | 266 | |
|
267 | 267 | def init_extensions(self): |
|
268 | 268 | """Load all IPython extensions in IPythonApp.extensions. |
|
269 | 269 | |
|
270 | 270 | This uses the :meth:`ExtensionManager.load_extensions` to load all |
|
271 | 271 | the extensions listed in ``self.extensions``. |
|
272 | 272 | """ |
|
273 | 273 | try: |
|
274 | 274 | self.log.debug("Loading IPython extensions...") |
|
275 | 275 | extensions = self.default_extensions + self.extensions |
|
276 | 276 | for ext in extensions: |
|
277 | 277 | try: |
|
278 | 278 | self.log.info("Loading IPython extension: %s" % ext) |
|
279 | 279 | self.shell.extension_manager.load_extension(ext) |
|
280 | 280 | except: |
|
281 | 281 | self.log.warn("Error in loading extension: %s" % ext + |
|
282 | 282 | "\nCheck your config files in %s" % self.profile_dir.location |
|
283 | 283 | ) |
|
284 | 284 | self.shell.showtraceback() |
|
285 | 285 | except: |
|
286 | 286 | self.log.warn("Unknown error in loading extensions:") |
|
287 | 287 | self.shell.showtraceback() |
|
288 | 288 | |
|
289 | 289 | def init_code(self): |
|
290 | 290 | """run the pre-flight code, specified via exec_lines""" |
|
291 | 291 | self._run_startup_files() |
|
292 | 292 | self._run_exec_lines() |
|
293 | 293 | self._run_exec_files() |
|
294 | 294 | |
|
295 | 295 | # Hide variables defined here from %who etc. |
|
296 | 296 | if self.hide_initial_ns: |
|
297 | 297 | self.shell.user_ns_hidden.update(self.shell.user_ns) |
|
298 | 298 | |
|
299 | 299 | # command-line execution (ipython -i script.py, ipython -m module) |
|
300 | 300 | # should *not* be excluded from %whos |
|
301 | 301 | self._run_cmd_line_code() |
|
302 | 302 | self._run_module() |
|
303 | 303 | |
|
304 | 304 | # flush output, so it won't be attached to the first cell
|
305 | 305 | sys.stdout.flush() |
|
306 | 306 | sys.stderr.flush() |
|
307 | 307 | |
|
308 | 308 | def _run_exec_lines(self): |
|
309 | 309 | """Run lines of code in IPythonApp.exec_lines in the user's namespace.""" |
|
310 | 310 | if not self.exec_lines: |
|
311 | 311 | return |
|
312 | 312 | try: |
|
313 | 313 | self.log.debug("Running code from IPythonApp.exec_lines...") |
|
314 | 314 | for line in self.exec_lines: |
|
315 | 315 | try: |
|
316 | 316 | self.log.info("Running code in user namespace: %s" % |
|
317 | 317 | line) |
|
318 | 318 | self.shell.run_cell(line, store_history=False) |
|
319 | 319 | except: |
|
320 | 320 | self.log.warn("Error in executing line in user " |
|
321 | 321 | "namespace: %s" % line) |
|
322 | 322 | self.shell.showtraceback() |
|
323 | 323 | except: |
|
324 | 324 | self.log.warn("Unknown error in handling IPythonApp.exec_lines:") |
|
325 | 325 | self.shell.showtraceback() |
|
326 | 326 | |
|
327 | 327 | def _exec_file(self, fname): |
|
328 | 328 | try: |
|
329 | 329 | full_filename = filefind(fname, [u'.', self.ipython_dir]) |
|
330 | 330 | except IOError as e: |
|
331 | 331 | self.log.warn("File not found: %r"%fname) |
|
332 | 332 | return |
|
333 | 333 | # Make sure that the running script gets a proper sys.argv as if it |
|
334 | 334 | # were run from a system shell. |
|
335 | 335 | save_argv = sys.argv |
|
336 | 336 | sys.argv = [full_filename] + self.extra_args[1:] |
|
337 | 337 | # protect sys.argv from potential unicode strings on Python 2: |
|
338 | 338 | if not py3compat.PY3: |
|
339 | 339 | sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ] |
|
340 | 340 | try: |
|
341 | 341 | if os.path.isfile(full_filename): |
|
342 | 342 | self.log.info("Running file in user namespace: %s" % |
|
343 | 343 | full_filename) |
|
344 | 344 | # Ensure that __file__ is always defined to match Python |
|
345 | 345 | # behavior. |
|
346 | 346 | with preserve_keys(self.shell.user_ns, '__file__'): |
|
347 | 347 | self.shell.user_ns['__file__'] = fname |
|
348 | 348 | if full_filename.endswith('.ipy'): |
|
349 | 349 | self.shell.safe_execfile_ipy(full_filename) |
|
350 | 350 | else: |
|
351 | 351 | # default to python, even without extension |
|
352 | 352 | self.shell.safe_execfile(full_filename, |
|
353 | 353 | self.shell.user_ns) |
|
354 | 354 | finally: |
|
355 | 355 | sys.argv = save_argv |
|
356 | 356 | |
|
357 | 357 | def _run_startup_files(self): |
|
358 | 358 | """Run files from profile startup directory""" |
|
359 | 359 | startup_dir = self.profile_dir.startup_dir |
|
360 | 360 | startup_files = [] |
|
361 | 361 | |
|
362 | 362 | if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \ |
|
363 | 363 | not (self.file_to_run or self.code_to_run or self.module_to_run): |
|
364 | 364 | python_startup = os.environ['PYTHONSTARTUP'] |
|
365 | 365 | self.log.debug("Running PYTHONSTARTUP file %s...", python_startup) |
|
366 | 366 | try: |
|
367 | 367 | self._exec_file(python_startup) |
|
368 | 368 | except: |
|
369 | 369 | self.log.warn("Unknown error in handling PYTHONSTARTUP file %s:", python_startup) |
|
370 | 370 | self.shell.showtraceback() |
|
371 | 371 | finally: |
|
372 | 372 | # Many PYTHONSTARTUP files set up the readline completions, |
|
373 | 373 | # but this is often at odds with IPython's own completions. |
|
374 | 374 | # Do not allow PYTHONSTARTUP to set up readline. |
|
375 | 375 | if self.shell.has_readline: |
|
376 | 376 | self.shell.set_readline_completer() |
|
377 | 377 | |
|
378 | 378 | startup_files += glob.glob(os.path.join(startup_dir, '*.py')) |
|
379 | 379 | startup_files += glob.glob(os.path.join(startup_dir, '*.ipy')) |
|
380 | 380 | if not startup_files: |
|
381 | 381 | return |
|
382 | 382 | |
|
383 | 383 | self.log.debug("Running startup files from %s...", startup_dir) |
|
384 | 384 | try: |
|
385 | 385 | for fname in sorted(startup_files): |
|
386 | 386 | self._exec_file(fname) |
|
387 | 387 | except: |
|
388 | 388 | self.log.warn("Unknown error in handling startup files:") |
|
389 | 389 | self.shell.showtraceback() |
|
390 | 390 | |
|
391 | 391 | def _run_exec_files(self): |
|
392 | 392 | """Run files from IPythonApp.exec_files""" |
|
393 | 393 | if not self.exec_files: |
|
394 | 394 | return |
|
395 | 395 | |
|
396 | 396 | self.log.debug("Running files in IPythonApp.exec_files...") |
|
397 | 397 | try: |
|
398 | 398 | for fname in self.exec_files: |
|
399 | 399 | self._exec_file(fname) |
|
400 | 400 | except: |
|
401 | 401 | self.log.warn("Unknown error in handling IPythonApp.exec_files:") |
|
402 | 402 | self.shell.showtraceback() |
|
403 | 403 | |
|
404 | 404 | def _run_cmd_line_code(self): |
|
405 | 405 | """Run code or file specified at the command-line""" |
|
406 | 406 | if self.code_to_run: |
|
407 | 407 | line = self.code_to_run |
|
408 | 408 | try: |
|
409 | 409 | self.log.info("Running code given at command line (c=): %s" % |
|
410 | 410 | line) |
|
411 | 411 | self.shell.run_cell(line, store_history=False) |
|
412 | 412 | except: |
|
413 | 413 | self.log.warn("Error in executing line in user namespace: %s" % |
|
414 | 414 | line) |
|
415 | 415 | self.shell.showtraceback() |
|
416 | 416 | |
|
417 | 417 | # Like Python itself, ignore the second if the first of these is present |
|
418 | 418 | elif self.file_to_run: |
|
419 | 419 | fname = self.file_to_run |
|
420 | 420 | try: |
|
421 | 421 | self._exec_file(fname) |
|
422 | 422 | except: |
|
423 | 423 | self.log.warn("Error in executing file in user namespace: %s" % |
|
424 | 424 | fname) |
|
425 | 425 | self.shell.showtraceback() |
|
426 | 426 | |
|
427 | 427 | def _run_module(self): |
|
428 | 428 | """Run module specified at the command-line.""" |
|
429 | 429 | if self.module_to_run: |
|
430 | 430 | # Make sure that the module gets a proper sys.argv as if it were |
|
431 | 431 | # run using `python -m`. |
|
432 | 432 | save_argv = sys.argv |
|
433 | 433 | sys.argv = [sys.executable] + self.extra_args |
|
434 | 434 | try: |
|
435 | 435 | self.shell.safe_run_module(self.module_to_run, |
|
436 | 436 | self.shell.user_ns) |
|
437 | 437 | finally: |
|
438 | 438 | sys.argv = save_argv |
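
A hedged illustration of the class above: InteractiveShellApp leaves the call order of its init_* methods to the host application, so a subclass is expected to drive them from its own initialize(). The sketch below is not part of this changeset; the MiniShellApp name and the choice of TerminalInteractiveShell are illustrative assumptions.

    # Sketch only: a hypothetical application following the documented order.
    from IPython.core.application import BaseIPythonApplication
    from IPython.core.shellapp import InteractiveShellApp
    from IPython.terminal.interactiveshell import TerminalInteractiveShell

    class MiniShellApp(BaseIPythonApplication, InteractiveShellApp):

        def init_shell(self):
            # Subclasses must supply the shell instance themselves.
            self.shell = TerminalInteractiveShell.instance(
                parent=self, profile_dir=self.profile_dir)

        def initialize(self, argv=None):
            super(MiniShellApp, self).initialize(argv)
            self.init_path()        # put '' (the cwd) on sys.path
            self.init_shell()       # create the InteractiveShell
            self.init_gui_pylab()   # honour gui/pylab/matplotlib settings
            self.init_extensions()  # load configured extensions
            self.init_code()        # exec_lines, exec_files, startup files

    if __name__ == '__main__':
        app = MiniShellApp()
        app.initialize([])
        app.shell.run_cell('print("hello")')
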
@@ -1,84 +1,83 b'' | |||
|
1 | 1 | """ Import Qt in a manner suitable for an IPython kernel. |
|
2 | 2 | |
|
3 | 3 | This is the import used for the `gui=qt` or `matplotlib=qt` initialization. |
|
4 | 4 | |
|
5 | 5 | Import Priority: |
|
6 | 6 | |
|
7 | 7 | if Qt4 has been imported anywhere else: |
|
8 | 8 | use that |
|
9 | 9 | |
|
10 | 10 | if matplotlib has been imported and doesn't support v2 (<= 1.0.1): |
|
11 | 11 | use PyQt4 @v1 |
|
12 | 12 | |
|
13 | 13 | Next, ask ETS' QT_API env variable |
|
14 | 14 | |
|
15 | 15 | if QT_API not set: |
|
16 | 16 | ask matplotlib via rcParams['backend.qt4'] |
|
17 | 17 | if it said PyQt: |
|
18 | 18 | use PyQt4 @v1 |
|
19 | 19 | elif it said PySide: |
|
20 | 20 | use PySide |
|
21 | 21 | |
|
22 | 22 | else: (matplotlib said nothing) |
|
23 | 23 | # this is the default path - nobody told us anything |
|
24 | 24 | try: |
|
25 | 25 | PyQt @v1 |
|
26 | 26 | except: |
|
27 | 27 | fallback on PySide |
|
28 | 28 | else: |
|
29 | 29 | use PyQt @v2 or PySide, depending on QT_API |
|
30 | 30 | because ETS doesn't work with PyQt @v1. |
|
31 | 31 | |
|
32 | 32 | """ |
|
33 | 33 | |
|
34 | 34 | import os |
|
35 | 35 | import sys |
|
36 | 36 | |
|
37 | from IPython.utils.warn import warn | |
|
38 | 37 | from IPython.utils.version import check_version |
|
39 | 38 | from IPython.external.qt_loaders import (load_qt, QT_API_PYSIDE, |
|
40 | 39 | QT_API_PYQT, QT_API_PYQT_DEFAULT, |
|
41 | 40 | loaded_api) |
|
42 | 41 | |
|
43 | 42 | #Constraints placed on an imported matplotlib |
|
44 | 43 | def matplotlib_options(mpl): |
|
45 | 44 | if mpl is None: |
|
46 | 45 | return |
|
47 | 46 | mpqt = mpl.rcParams.get('backend.qt4', None) |
|
48 | 47 | if mpqt is None: |
|
49 | 48 | return None |
|
50 | 49 | if mpqt.lower() == 'pyside': |
|
51 | 50 | return [QT_API_PYSIDE] |
|
52 | 51 | elif mpqt.lower() == 'pyqt4': |
|
53 | 52 | return [QT_API_PYQT_DEFAULT] |
|
54 | 53 | raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" % |
|
55 | 54 | mpqt) |
|
56 | 55 | |
|
57 | 56 | def get_options(): |
|
58 | 57 | """Return a list of acceptable QT APIs, in decreasing order of |
|
59 | 58 | preference |
|
60 | 59 | """ |
|
61 | 60 | #already imported Qt somewhere. Use that |
|
62 | 61 | loaded = loaded_api() |
|
63 | 62 | if loaded is not None: |
|
64 | 63 | return [loaded] |
|
65 | 64 | |
|
66 | 65 | mpl = sys.modules.get('matplotlib', None) |
|
67 | 66 | |
|
68 | 67 | if mpl is not None and not check_version(mpl.__version__, '1.0.2'): |
|
69 | 68 | #1.0.1 only supports PyQt4 v1 |
|
70 | 69 | return [QT_API_PYQT_DEFAULT] |
|
71 | 70 | |
|
72 | 71 | if os.environ.get('QT_API', None) is None: |
|
73 | 72 | #no ETS variable. Ask mpl, then use either |
|
74 | 73 | return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE] |
|
75 | 74 | |
|
76 | 75 | #ETS variable present. Will fall back to external.qt
|
77 | 76 | return None |
|
78 | 77 | |
|
79 | 78 | api_opts = get_options() |
|
80 | 79 | if api_opts is not None: |
|
81 | 80 | QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) |
|
82 | 81 | |
|
83 | 82 | else: # use ETS variable |
|
84 | 83 | from IPython.external.qt import QtCore, QtGui, QtSvg, QT_API |
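
The decision chain described in the module docstring can be exercised directly. A small hedged sketch: it assumes this file is importable as IPython.external.qt_for_kernel (its location in the IPython tree this diff targets) and that at least one of PyQt4 or PySide is installed.

    # Let the priority chain pick a binding when QT_API is unset.
    import os
    os.environ.pop('QT_API', None)   # force the "nobody told us anything" path

    from IPython.external import qt_for_kernel

    print("selected Qt API:", qt_for_kernel.QT_API)         # e.g. 'pyqt' or 'pyside'
    print("QtCore provided by:", qt_for_kernel.QtCore.__name__)
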
@@ -1,140 +1,137 b'' | |||
|
1 | 1 | import io |
|
2 | 2 | import os |
|
3 | 3 | import zipfile |
|
4 | 4 | |
|
5 | 5 | from tornado import web |
|
6 | 6 | |
|
7 | 7 | from ..base.handlers import IPythonHandler, notebook_path_regex |
|
8 | 8 | from IPython.nbformat.current import to_notebook_json |
|
9 | 9 | |
|
10 | from IPython.utils import tz | |
|
11 | 10 | from IPython.utils.py3compat import cast_bytes |
|
12 | 11 | |
|
13 | import sys | |
|
14 | ||
|
15 | 12 | def find_resource_files(output_files_dir): |
|
16 | 13 | files = [] |
|
17 | 14 | for dirpath, dirnames, filenames in os.walk(output_files_dir): |
|
18 | 15 | files.extend([os.path.join(dirpath, f) for f in filenames]) |
|
19 | 16 | return files |
|
20 | 17 | |
|
21 | 18 | def respond_zip(handler, name, output, resources): |
|
22 | 19 | """Zip up the output and resource files and respond with the zip file. |
|
23 | 20 | |
|
24 | 21 | Returns True if it has served a zip file, False if there are no resource |
|
25 | 22 | files, in which case we serve the plain output file. |
|
26 | 23 | """ |
|
27 | 24 | # Check if we have resource files we need to zip |
|
28 | 25 | output_files = resources.get('outputs', None) |
|
29 | 26 | if not output_files: |
|
30 | 27 | return False |
|
31 | 28 | |
|
32 | 29 | # Headers |
|
33 | 30 | zip_filename = os.path.splitext(name)[0] + '.zip' |
|
34 | 31 | handler.set_header('Content-Disposition', |
|
35 | 32 | 'attachment; filename="%s"' % zip_filename) |
|
36 | 33 | handler.set_header('Content-Type', 'application/zip') |
|
37 | 34 | |
|
38 | 35 | # Prepare the zip file |
|
39 | 36 | buffer = io.BytesIO() |
|
40 | 37 | zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) |
|
41 | 38 | output_filename = os.path.splitext(name)[0] + '.' + resources['output_extension'] |
|
42 | 39 | zipf.writestr(output_filename, cast_bytes(output, 'utf-8')) |
|
43 | 40 | for filename, data in output_files.items(): |
|
44 | 41 | zipf.writestr(os.path.basename(filename), data) |
|
45 | 42 | zipf.close() |
|
46 | 43 | |
|
47 | 44 | handler.finish(buffer.getvalue()) |
|
48 | 45 | return True |
|
49 | 46 | |
|
50 | 47 | def get_exporter(format, **kwargs): |
|
51 | 48 | """get an exporter, raising appropriate errors""" |
|
52 | 49 | # if this fails, will raise 500 |
|
53 | 50 | try: |
|
54 | 51 | from IPython.nbconvert.exporters.export import exporter_map |
|
55 | 52 | except ImportError as e: |
|
56 | 53 | raise web.HTTPError(500, "Could not import nbconvert: %s" % e) |
|
57 | 54 | |
|
58 | 55 | try: |
|
59 | 56 | Exporter = exporter_map[format] |
|
60 | 57 | except KeyError: |
|
61 | 58 | # should this be 400? |
|
62 | 59 | raise web.HTTPError(404, u"No exporter for format: %s" % format) |
|
63 | 60 | |
|
64 | 61 | try: |
|
65 | 62 | return Exporter(**kwargs) |
|
66 | 63 | except Exception as e: |
|
67 | 64 | raise web.HTTPError(500, "Could not construct Exporter: %s" % e) |
|
68 | 65 | |
|
69 | 66 | class NbconvertFileHandler(IPythonHandler): |
|
70 | 67 | |
|
71 | 68 | SUPPORTED_METHODS = ('GET',) |
|
72 | 69 | |
|
73 | 70 | @web.authenticated |
|
74 | 71 | def get(self, format, path='', name=None): |
|
75 | 72 | |
|
76 | 73 | exporter = get_exporter(format, config=self.config, log=self.log) |
|
77 | 74 | |
|
78 | 75 | path = path.strip('/') |
|
79 | 76 | model = self.notebook_manager.get_notebook(name=name, path=path) |
|
80 | 77 | |
|
81 | 78 | self.set_header('Last-Modified', model['last_modified']) |
|
82 | 79 | |
|
83 | 80 | try: |
|
84 | 81 | output, resources = exporter.from_notebook_node(model['content']) |
|
85 | 82 | except Exception as e: |
|
86 | 83 | raise web.HTTPError(500, "nbconvert failed: %s" % e) |
|
87 | 84 | |
|
88 | 85 | if respond_zip(self, name, output, resources): |
|
89 | 86 | return |
|
90 | 87 | |
|
91 | 88 | # Force download if requested |
|
92 | 89 | if self.get_argument('download', 'false').lower() == 'true': |
|
93 | 90 | filename = os.path.splitext(name)[0] + '.' + resources['output_extension'] |
|
94 | 91 | self.set_header('Content-Disposition', |
|
95 | 92 | 'attachment; filename="%s"' % filename) |
|
96 | 93 | |
|
97 | 94 | # MIME type |
|
98 | 95 | if exporter.output_mimetype: |
|
99 | 96 | self.set_header('Content-Type', |
|
100 | 97 | '%s; charset=utf-8' % exporter.output_mimetype) |
|
101 | 98 | |
|
102 | 99 | self.finish(output) |
|
103 | 100 | |
|
104 | 101 | class NbconvertPostHandler(IPythonHandler): |
|
105 | 102 | SUPPORTED_METHODS = ('POST',) |
|
106 | 103 | |
|
107 | 104 | @web.authenticated |
|
108 | 105 | def post(self, format): |
|
109 | 106 | exporter = get_exporter(format, config=self.config) |
|
110 | 107 | |
|
111 | 108 | model = self.get_json_body() |
|
112 | 109 | nbnode = to_notebook_json(model['content']) |
|
113 | 110 | |
|
114 | 111 | try: |
|
115 | 112 | output, resources = exporter.from_notebook_node(nbnode) |
|
116 | 113 | except Exception as e: |
|
117 | 114 | raise web.HTTPError(500, "nbconvert failed: %s" % e) |
|
118 | 115 | |
|
119 | 116 | if respond_zip(self, nbnode.metadata.name, output, resources): |
|
120 | 117 | return |
|
121 | 118 | |
|
122 | 119 | # MIME type |
|
123 | 120 | if exporter.output_mimetype: |
|
124 | 121 | self.set_header('Content-Type', |
|
125 | 122 | '%s; charset=utf-8' % exporter.output_mimetype) |
|
126 | 123 | |
|
127 | 124 | self.finish(output) |
|
128 | 125 | |
|
129 | 126 | #----------------------------------------------------------------------------- |
|
130 | 127 | # URL to handler mappings |
|
131 | 128 | #----------------------------------------------------------------------------- |
|
132 | 129 | |
|
133 | 130 | _format_regex = r"(?P<format>\w+)" |
|
134 | 131 | |
|
135 | 132 | |
|
136 | 133 | default_handlers = [ |
|
137 | 134 | (r"/nbconvert/%s%s" % (_format_regex, notebook_path_regex), |
|
138 | 135 | NbconvertFileHandler), |
|
139 | 136 | (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), |
|
140 | ] No newline at end of file | |
|
137 | ] |
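
For context, the handlers registered above answer GET /nbconvert/<format>/<path>/<name> and POST /nbconvert/<format>. A hedged client-side sketch follows; the server address, the lack of authentication, and the notebook name are all assumptions.

    # Fetch an HTML export of a notebook from a running notebook server.
    import requests

    base = "http://localhost:8888"                      # assumed server
    url = base + "/nbconvert/html/Untitled0.ipynb"      # assumed notebook
    resp = requests.get(url, params={"download": "true"})
    resp.raise_for_status()

    # When the exporter emits extra resource files, respond_zip() above
    # serves a zip archive instead of a bare document.
    print(resp.headers.get("Content-Type"))
    with open("export_result", "wb") as f:
        f.write(resp.content)
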
@@ -1,132 +1,130 b'' | |||
|
1 | 1 | """A kernel manager relating notebooks and kernels |
|
2 | 2 | |
|
3 | 3 | Authors: |
|
4 | 4 | |
|
5 | 5 | * Brian Granger |
|
6 | 6 | """ |
|
7 | 7 | |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Copyright (C) 2013 The IPython Development Team |
|
10 | 10 | # |
|
11 | 11 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | 12 | # the file COPYING, distributed as part of this software. |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Imports |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | 19 | import os |
|
20 | 20 | |
|
21 | 21 | from tornado import web |
|
22 | 22 | |
|
23 | 23 | from IPython.kernel.multikernelmanager import MultiKernelManager |
|
24 | from IPython.utils.traitlets import ( | |

25 | Dict, List, Unicode, | |
|
26 | ) | |
|
24 | from IPython.utils.traitlets import List, Unicode, TraitError | |
|
27 | 25 | |
|
28 | 26 | from IPython.html.utils import to_os_path |
|
29 | 27 | from IPython.utils.py3compat import getcwd |
|
30 | 28 | |
|
31 | 29 | #----------------------------------------------------------------------------- |
|
32 | 30 | # Classes |
|
33 | 31 | #----------------------------------------------------------------------------- |
|
34 | 32 | |
|
35 | 33 | |
|
36 | 34 | class MappingKernelManager(MultiKernelManager): |
|
37 | 35 | """A KernelManager that handles notebook mapping and HTTP error handling""" |
|
38 | 36 | |
|
39 | 37 | def _kernel_manager_class_default(self): |
|
40 | 38 | return "IPython.kernel.ioloop.IOLoopKernelManager" |
|
41 | 39 | |
|
42 | 40 | kernel_argv = List(Unicode) |
|
43 | 41 | |
|
44 | 42 | root_dir = Unicode(getcwd(), config=True) |
|
45 | 43 | |
|
46 | 44 | def _root_dir_changed(self, name, old, new): |
|
47 | 45 | """Do a bit of validation of the root dir.""" |
|
48 | 46 | if not os.path.isabs(new): |
|
49 | 47 | # If we receive a non-absolute path, make it absolute. |
|
50 | 48 | self.root_dir = os.path.abspath(new) |
|
51 | 49 | return |
|
52 | 50 | if not os.path.exists(new) or not os.path.isdir(new): |
|
53 | 51 | raise TraitError("kernel root dir %r is not a directory" % new) |
|
54 | 52 | |
|
55 | 53 | #------------------------------------------------------------------------- |
|
56 | 54 | # Methods for managing kernels and sessions |
|
57 | 55 | #------------------------------------------------------------------------- |
|
58 | 56 | |
|
59 | 57 | def _handle_kernel_died(self, kernel_id): |
|
60 | 58 | """notice that a kernel died""" |
|
61 | 59 | self.log.warn("Kernel %s died, removing from map.", kernel_id) |
|
62 | 60 | self.remove_kernel(kernel_id) |
|
63 | 61 | |
|
64 | 62 | def cwd_for_path(self, path): |
|
65 | 63 | """Turn API path into absolute OS path.""" |
|
66 | 64 | # short circuit for NotebookManagers that pass in absolute paths |
|
67 | 65 | if os.path.exists(path): |
|
68 | 66 | return path |
|
69 | 67 | |
|
70 | 68 | os_path = to_os_path(path, self.root_dir) |
|
71 | 69 | # in the case of notebooks and kernels not being on the same filesystem, |
|
72 | 70 | # walk up to root_dir if the paths don't exist |
|
73 | 71 | while not os.path.exists(os_path) and os_path != self.root_dir: |
|
74 | 72 | os_path = os.path.dirname(os_path) |
|
75 | 73 | return os_path |
|
76 | 74 | |
|
77 | 75 | def start_kernel(self, kernel_id=None, path=None, **kwargs): |
|
78 | 76 | """Start a kernel for a session and return its kernel_id.
|
79 | 77 | |
|
80 | 78 | Parameters |
|
81 | 79 | ---------- |
|
82 | 80 | kernel_id : uuid |
|
83 | 81 | The uuid to associate the new kernel with. If this |
|
84 | 82 | is not None, this kernel will be persistent whenever it is |
|
85 | 83 | requested. |
|
86 | 84 | path : API path |
|
87 | 85 | The API path (unicode, '/' delimited) for the cwd. |
|
88 | 86 | Will be transformed to an OS path relative to root_dir. |
|
89 | 87 | """ |
|
90 | 88 | if kernel_id is None: |
|
91 | 89 | kwargs['extra_arguments'] = self.kernel_argv |
|
92 | 90 | if path is not None: |
|
93 | 91 | kwargs['cwd'] = self.cwd_for_path(path) |
|
94 | 92 | kernel_id = super(MappingKernelManager, self).start_kernel(**kwargs) |
|
95 | 93 | self.log.info("Kernel started: %s" % kernel_id) |
|
96 | 94 | self.log.debug("Kernel args: %r" % kwargs) |
|
97 | 95 | # register callback for failed auto-restart |
|
98 | 96 | self.add_restart_callback(kernel_id, |
|
99 | 97 | lambda : self._handle_kernel_died(kernel_id), |
|
100 | 98 | 'dead', |
|
101 | 99 | ) |
|
102 | 100 | else: |
|
103 | 101 | self._check_kernel_id(kernel_id) |
|
104 | 102 | self.log.info("Using existing kernel: %s" % kernel_id) |
|
105 | 103 | return kernel_id |
|
106 | 104 | |
|
107 | 105 | def shutdown_kernel(self, kernel_id, now=False): |
|
108 | 106 | """Shutdown a kernel by kernel_id""" |
|
109 | 107 | self._check_kernel_id(kernel_id) |
|
110 | 108 | super(MappingKernelManager, self).shutdown_kernel(kernel_id, now=now) |
|
111 | 109 | |
|
112 | 110 | def kernel_model(self, kernel_id): |
|
113 | 111 | """Return a dictionary of kernel information described in the |
|
114 | 112 | JSON standard model.""" |
|
115 | 113 | self._check_kernel_id(kernel_id) |
|
116 | 114 | model = {"id":kernel_id} |
|
117 | 115 | return model |
|
118 | 116 | |
|
119 | 117 | def list_kernels(self): |
|
120 | 118 | """Returns a list of kernel_id's of kernels running.""" |
|
121 | 119 | kernels = [] |
|
122 | 120 | kernel_ids = super(MappingKernelManager, self).list_kernel_ids() |
|
123 | 121 | for kernel_id in kernel_ids: |
|
124 | 122 | model = self.kernel_model(kernel_id) |
|
125 | 123 | kernels.append(model) |
|
126 | 124 | return kernels |
|
127 | 125 | |
|
128 | 126 | # override _check_kernel_id to raise 404 instead of KeyError |
|
129 | 127 | def _check_kernel_id(self, kernel_id): |
|
130 | 128 | """Check that a kernel_id exists and raise 404 if not."""
|
131 | 129 | if kernel_id not in self: |
|
132 | 130 | raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id) |
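
The fallback in cwd_for_path above simply climbs parent directories until something exists (or root_dir is reached). A standalone sketch of that loop, with illustrative paths:

    import os

    def walk_up_to_existing(os_path, root_dir):
        # Mirrors the fallback loop in MappingKernelManager.cwd_for_path.
        while not os.path.exists(os_path) and os_path != root_dir:
            os_path = os.path.dirname(os_path)
        return os_path

    root = os.getcwd()
    missing = os.path.join(root, "no", "such", "subdir")    # illustrative path
    print(walk_up_to_existing(missing, root))               # -> nearest existing parent
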
@@ -1,122 +1,118 b'' | |||
|
1 | 1 | """Test the kernels service API.""" |
|
2 | 2 | |
|
3 | 3 | |
|
4 | import os | |
|
5 | import sys | |
|
6 | import json | |
|
7 | ||
|
8 | 4 | import requests |
|
9 | 5 | |
|
10 | 6 | from IPython.html.utils import url_path_join |
|
11 | 7 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error |
|
12 | 8 | |
|
13 | 9 | class KernelAPI(object): |
|
14 | 10 | """Wrapper for kernel REST API requests""" |
|
15 | 11 | def __init__(self, base_url): |
|
16 | 12 | self.base_url = base_url |
|
17 | 13 | |
|
18 | 14 | def _req(self, verb, path, body=None): |
|
19 | 15 | response = requests.request(verb, |
|
20 | 16 | url_path_join(self.base_url, 'api/kernels', path), data=body) |
|
21 | 17 | |
|
22 | 18 | if 400 <= response.status_code < 600: |
|
23 | 19 | try: |
|
24 | 20 | response.reason = response.json()['message'] |
|
25 | 21 | except: |
|
26 | 22 | pass |
|
27 | 23 | response.raise_for_status() |
|
28 | 24 | |
|
29 | 25 | return response |
|
30 | 26 | |
|
31 | 27 | def list(self): |
|
32 | 28 | return self._req('GET', '') |
|
33 | 29 | |
|
34 | 30 | def get(self, id): |
|
35 | 31 | return self._req('GET', id) |
|
36 | 32 | |
|
37 | 33 | def start(self): |
|
38 | 34 | return self._req('POST', '') |
|
39 | 35 | |
|
40 | 36 | def shutdown(self, id): |
|
41 | 37 | return self._req('DELETE', id) |
|
42 | 38 | |
|
43 | 39 | def interrupt(self, id): |
|
44 | 40 | return self._req('POST', url_path_join(id, 'interrupt')) |
|
45 | 41 | |
|
46 | 42 | def restart(self, id): |
|
47 | 43 | return self._req('POST', url_path_join(id, 'restart')) |
|
48 | 44 | |
|
49 | 45 | class KernelAPITest(NotebookTestBase): |
|
50 | 46 | """Test the kernels web service API""" |
|
51 | 47 | def setUp(self): |
|
52 | 48 | self.kern_api = KernelAPI(self.base_url()) |
|
53 | 49 | |
|
54 | 50 | def tearDown(self): |
|
55 | 51 | for k in self.kern_api.list().json(): |
|
56 | 52 | self.kern_api.shutdown(k['id']) |
|
57 | 53 | |
|
58 | 54 | def test__no_kernels(self): |
|
59 | 55 | """Make sure there are no kernels running at the start""" |
|
60 | 56 | kernels = self.kern_api.list().json() |
|
61 | 57 | self.assertEqual(kernels, []) |
|
62 | 58 | |
|
63 | 59 | def test_main_kernel_handler(self): |
|
64 | 60 | # POST request |
|
65 | 61 | r = self.kern_api.start() |
|
66 | 62 | kern1 = r.json() |
|
67 | 63 | self.assertEqual(r.headers['location'], '/api/kernels/' + kern1['id']) |
|
68 | 64 | self.assertEqual(r.status_code, 201) |
|
69 | 65 | self.assertIsInstance(kern1, dict) |
|
70 | 66 | |
|
71 | 67 | # GET request |
|
72 | 68 | r = self.kern_api.list() |
|
73 | 69 | self.assertEqual(r.status_code, 200) |
|
74 | 70 | assert isinstance(r.json(), list) |
|
75 | 71 | self.assertEqual(r.json()[0]['id'], kern1['id']) |
|
76 | 72 | |
|
77 | 73 | # create another kernel and check that they both are added to the |
|
78 | 74 | # list of kernels from a GET request |
|
79 | 75 | kern2 = self.kern_api.start().json() |
|
80 | 76 | assert isinstance(kern2, dict) |
|
81 | 77 | r = self.kern_api.list() |
|
82 | 78 | kernels = r.json() |
|
83 | 79 | self.assertEqual(r.status_code, 200) |
|
84 | 80 | assert isinstance(kernels, list) |
|
85 | 81 | self.assertEqual(len(kernels), 2) |
|
86 | 82 | |
|
87 | 83 | # Interrupt a kernel |
|
88 | 84 | r = self.kern_api.interrupt(kern2['id']) |
|
89 | 85 | self.assertEqual(r.status_code, 204) |
|
90 | 86 | |
|
91 | 87 | # Restart a kernel |
|
92 | 88 | r = self.kern_api.restart(kern2['id']) |
|
93 | 89 | self.assertEqual(r.headers['Location'], '/api/kernels/'+kern2['id']) |
|
94 | 90 | rekern = r.json() |
|
95 | 91 | self.assertEqual(rekern['id'], kern2['id']) |
|
96 | 92 | |
|
97 | 93 | def test_kernel_handler(self): |
|
98 | 94 | # GET kernel with given id |
|
99 | 95 | kid = self.kern_api.start().json()['id'] |
|
100 | 96 | r = self.kern_api.get(kid) |
|
101 | 97 | kern1 = r.json() |
|
102 | 98 | self.assertEqual(r.status_code, 200) |
|
103 | 99 | assert isinstance(kern1, dict) |
|
104 | 100 | self.assertIn('id', kern1) |
|
105 | 101 | self.assertEqual(kern1['id'], kid) |
|
106 | 102 | |
|
107 | 103 | # Request a bad kernel id and check that a JSON |
|
108 | 104 | # message is returned! |
|
109 | 105 | bad_id = '111-111-111-111-111' |
|
110 | 106 | with assert_http_error(404, 'Kernel does not exist: ' + bad_id): |
|
111 | 107 | self.kern_api.get(bad_id) |
|
112 | 108 | |
|
113 | 109 | # DELETE kernel with id |
|
114 | 110 | r = self.kern_api.shutdown(kid) |
|
115 | 111 | self.assertEqual(r.status_code, 204) |
|
116 | 112 | kernels = self.kern_api.list().json() |
|
117 | 113 | self.assertEqual(kernels, []) |
|
118 | 114 | |
|
119 | 115 | # Request to delete a non-existent kernel id |
|
120 | 116 | bad_id = '111-111-111-111-111' |
|
121 | 117 | with assert_http_error(404, 'Kernel does not exist: ' + bad_id): |
|
122 | 118 | self.kern_api.shutdown(bad_id) |
@@ -1,103 +1,101 b'' | |||
|
1 | 1 | """Tornado handlers for the tree view. |
|
2 | 2 | |
|
3 | 3 | Authors: |
|
4 | 4 | |
|
5 | 5 | * Brian Granger |
|
6 | 6 | """ |
|
7 | 7 | |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Copyright (C) 2011 The IPython Development Team |
|
10 | 10 | # |
|
11 | 11 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | 12 | # the file COPYING, distributed as part of this software. |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Imports |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | import os | |
|
19 | ||
|
20 | 18 | from tornado import web |
|
21 | 19 | from ..base.handlers import IPythonHandler, notebook_path_regex, path_regex |
|
22 | from ..utils import url_path_join, | |

20 | from ..utils import url_path_join, url_escape | |
|
23 | 21 | |
|
24 | 22 | #----------------------------------------------------------------------------- |
|
25 | 23 | # Handlers |
|
26 | 24 | #----------------------------------------------------------------------------- |
|
27 | 25 | |
|
28 | 26 | |
|
29 | 27 | class TreeHandler(IPythonHandler): |
|
30 | 28 | """Render the tree view, listing notebooks, clusters, etc.""" |
|
31 | 29 | |
|
32 | 30 | def generate_breadcrumbs(self, path): |
|
33 | 31 | breadcrumbs = [(url_escape(url_path_join(self.base_url, 'tree')), '')] |
|
34 | 32 | comps = path.split('/') |
|
35 | 33 | ncomps = len(comps) |
|
36 | 34 | for i in range(ncomps): |
|
37 | 35 | if comps[i]: |
|
38 | 36 | link = url_escape(url_path_join(self.base_url, 'tree', *comps[0:i+1])) |
|
39 | 37 | breadcrumbs.append((link, comps[i])) |
|
40 | 38 | return breadcrumbs |
|
41 | 39 | |
|
42 | 40 | def generate_page_title(self, path): |
|
43 | 41 | comps = path.split('/') |
|
44 | 42 | if len(comps) > 3: |
|
45 | 43 | for i in range(len(comps)-2): |
|
46 | 44 | comps.pop(0) |
|
47 | 45 | page_title = url_escape(url_path_join(*comps)) |
|
48 | 46 | if page_title: |
|
49 | 47 | return page_title+'/' |
|
50 | 48 | else: |
|
51 | 49 | return 'Home' |
|
52 | 50 | |
|
53 | 51 | @web.authenticated |
|
54 | 52 | def get(self, path='', name=None): |
|
55 | 53 | path = path.strip('/') |
|
56 | 54 | nbm = self.notebook_manager |
|
57 | 55 | if name is not None: |
|
58 | 56 | # is a notebook, redirect to notebook handler |
|
59 | 57 | url = url_escape(url_path_join( |
|
60 | 58 | self.base_url, 'notebooks', path, name |
|
61 | 59 | )) |
|
62 | 60 | self.log.debug("Redirecting %s to %s", self.request.path, url) |
|
63 | 61 | self.redirect(url) |
|
64 | 62 | else: |
|
65 | 63 | if not nbm.path_exists(path=path): |
|
66 | 64 | # Directory is hidden or does not exist. |
|
67 | 65 | raise web.HTTPError(404) |
|
68 | 66 | elif nbm.is_hidden(path): |
|
69 | 67 | self.log.info("Refusing to serve hidden directory, via 404 Error") |
|
70 | 68 | raise web.HTTPError(404) |
|
71 | 69 | breadcrumbs = self.generate_breadcrumbs(path) |
|
72 | 70 | page_title = self.generate_page_title(path) |
|
73 | 71 | self.write(self.render_template('tree.html', |
|
74 | 72 | project=self.project_dir, |
|
75 | 73 | page_title=page_title, |
|
76 | 74 | notebook_path=path, |
|
77 | 75 | breadcrumbs=breadcrumbs |
|
78 | 76 | )) |
|
79 | 77 | |
|
80 | 78 | |
|
81 | 79 | class TreeRedirectHandler(IPythonHandler): |
|
82 | 80 | """Redirect a request to the corresponding tree URL""" |
|
83 | 81 | |
|
84 | 82 | @web.authenticated |
|
85 | 83 | def get(self, path=''): |
|
86 | 84 | url = url_escape(url_path_join( |
|
87 | 85 | self.base_url, 'tree', path.strip('/') |
|
88 | 86 | )) |
|
89 | 87 | self.log.debug("Redirecting %s to %s", self.request.path, url) |
|
90 | 88 | self.redirect(url) |
|
91 | 89 | |
|
92 | 90 | |
|
93 | 91 | #----------------------------------------------------------------------------- |
|
94 | 92 | # URL to handler mappings |
|
95 | 93 | #----------------------------------------------------------------------------- |
|
96 | 94 | |
|
97 | 95 | |
|
98 | 96 | default_handlers = [ |
|
99 | 97 | (r"/tree%s" % notebook_path_regex, TreeHandler), |
|
100 | 98 | (r"/tree%s" % path_regex, TreeHandler), |
|
101 | 99 | (r"/tree", TreeHandler), |
|
102 | 100 | (r"/", TreeRedirectHandler), |
|
103 | 101 | ] |
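
generate_breadcrumbs above produces one (link, label) pair per path component. A self-contained sketch of the same construction, approximating url_path_join and url_escape with '/'.join and urllib quoting; the base_url of '/' is an assumption.

    try:
        from urllib import quote          # Python 2
    except ImportError:
        from urllib.parse import quote    # Python 3

    def breadcrumbs(base_url, path):
        crumbs = [(quote(base_url + 'tree'), '')]
        comps = path.split('/')
        for i, comp in enumerate(comps):
            if comp:
                link = quote('/'.join([base_url + 'tree'] + comps[:i + 1]))
                crumbs.append((link, comp))
        return crumbs

    print(breadcrumbs('/', 'folder/subfolder'))
    # [('/tree', ''), ('/tree/folder', 'folder'),
    #  ('/tree/folder/subfolder', 'subfolder')]
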
@@ -1,312 +1,311 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """NbConvert is a utility for conversion of .ipynb files. |
|
3 | 3 | |
|
4 | 4 | Command-line interface for the NbConvert conversion utility. |
|
5 | 5 | """ |
|
6 | 6 | |
|
7 | 7 | # Copyright (c) IPython Development Team. |
|
8 | 8 | # Distributed under the terms of the Modified BSD License. |
|
9 | 9 | |
|
10 | 10 | from __future__ import print_function |
|
11 | 11 | |
|
12 | 12 | import logging |
|
13 | 13 | import sys |
|
14 | 14 | import os |
|
15 | 15 | import glob |
|
16 | 16 | |
|
17 | 17 | from IPython.core.application import BaseIPythonApplication, base_aliases, base_flags |
|
18 | 18 | from IPython.core.profiledir import ProfileDir |
|
19 | 19 | from IPython.config import catch_config_error, Configurable |
|
20 | 20 | from IPython.utils.traitlets import ( |
|
21 | 21 | Unicode, List, Instance, DottedObjectName, Type, CaselessStrEnum, |
|
22 | 22 | ) |
|
23 | 23 | from IPython.utils.importstring import import_item |
|
24 | from IPython.utils.text import dedent | |
|
25 | 24 | |
|
26 | 25 | from .exporters.export import get_export_names, exporter_map |
|
27 | 26 | from IPython.nbconvert import exporters, preprocessors, writers, postprocessors |
|
28 | 27 | from .utils.base import NbConvertBase |
|
29 | 28 | from .utils.exceptions import ConversionException |
|
30 | 29 | |
|
31 | 30 | #----------------------------------------------------------------------------- |
|
32 | 31 | #Classes and functions |
|
33 | 32 | #----------------------------------------------------------------------------- |
|
34 | 33 | |
|
35 | 34 | class DottedOrNone(DottedObjectName): |
|
36 | 35 | """ |
|
37 | 36 | A string holding a valid dotted object name in Python, such as A.b3._c.
|
38 | 37 | Also allows for None type.""" |
|
39 | 38 | |
|
40 | 39 | default_value = u'' |
|
41 | 40 | |
|
42 | 41 | def validate(self, obj, value): |
|
43 | 42 | if value is not None and len(value) > 0: |
|
44 | 43 | return super(DottedOrNone, self).validate(obj, value) |
|
45 | 44 | else: |
|
46 | 45 | return value |
|
47 | 46 | |
|
48 | 47 | nbconvert_aliases = {} |
|
49 | 48 | nbconvert_aliases.update(base_aliases) |
|
50 | 49 | nbconvert_aliases.update({ |
|
51 | 50 | 'to' : 'NbConvertApp.export_format', |
|
52 | 51 | 'template' : 'TemplateExporter.template_file', |
|
53 | 52 | 'writer' : 'NbConvertApp.writer_class', |
|
54 | 53 | 'post': 'NbConvertApp.postprocessor_class', |
|
55 | 54 | 'output': 'NbConvertApp.output_base', |
|
56 | 55 | 'reveal-prefix': 'RevealHelpPreprocessor.url_prefix', |
|
57 | 56 | }) |
|
58 | 57 | |
|
59 | 58 | nbconvert_flags = {} |
|
60 | 59 | nbconvert_flags.update(base_flags) |
|
61 | 60 | nbconvert_flags.update({ |
|
62 | 61 | 'stdout' : ( |
|
63 | 62 | {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, |
|
64 | 63 | "Write notebook output to stdout instead of files." |
|
65 | 64 | ) |
|
66 | 65 | }) |
|
67 | 66 | |
|
68 | 67 | |
|
69 | 68 | class NbConvertApp(BaseIPythonApplication): |
|
70 | 69 | """Application used to convert from notebook file type (``*.ipynb``)""" |
|
71 | 70 | |
|
72 | 71 | name = 'ipython-nbconvert' |
|
73 | 72 | aliases = nbconvert_aliases |
|
74 | 73 | flags = nbconvert_flags |
|
75 | 74 | |
|
76 | 75 | def _log_level_default(self): |
|
77 | 76 | return logging.INFO |
|
78 | 77 | |
|
79 | 78 | def _classes_default(self): |
|
80 | 79 | classes = [NbConvertBase, ProfileDir] |
|
81 | 80 | for pkg in (exporters, preprocessors, writers, postprocessors): |
|
82 | 81 | for name in dir(pkg): |
|
83 | 82 | cls = getattr(pkg, name) |
|
84 | 83 | if isinstance(cls, type) and issubclass(cls, Configurable): |
|
85 | 84 | classes.append(cls) |
|
86 | 85 | |
|
87 | 86 | return classes |
|
88 | 87 | |
|
89 | 88 | description = Unicode( |
|
90 | 89 | u"""This application is used to convert notebook files (*.ipynb) |
|
91 | 90 | to various other formats. |
|
92 | 91 | |
|
93 | 92 | WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") |
|
94 | 93 | |
|
95 | 94 | output_base = Unicode('', config=True, help='''Overwrite base name used for output files.

96 | 95 | Can only be used when converting one notebook at a time.
|
97 | 96 | ''') |
|
98 | 97 | |
|
99 | 98 | examples = Unicode(u""" |
|
100 | 99 | The simplest way to use nbconvert is |
|
101 | 100 | |
|
102 | 101 | > ipython nbconvert mynotebook.ipynb |
|
103 | 102 | |
|
104 | 103 | which will convert mynotebook.ipynb to the default format (probably HTML). |
|
105 | 104 | |
|
106 | 105 | You can specify the export format with `--to`. |
|
107 | 106 | Options include {0} |
|
108 | 107 | |
|
109 | 108 | > ipython nbconvert --to latex mynotebook.ipynb |
|
110 | 109 | |
|
111 | 110 | Both HTML and LaTeX support multiple output templates. LaTeX includes |
|
112 | 111 | 'basic', 'book', and 'article'. HTML includes 'basic' and 'full'. You |
|
113 | 112 | can specify the flavor of the format used. |
|
114 | 113 | |
|
115 | 114 | > ipython nbconvert --to html --template basic mynotebook.ipynb |
|
116 | 115 | |
|
117 | 116 | You can also pipe the output to stdout, rather than a file |
|
118 | 117 | |
|
119 | 118 | > ipython nbconvert mynotebook.ipynb --stdout |
|
120 | 119 | |
|
121 | 120 | PDF is generated via latex |
|
122 | 121 | |
|
123 | 122 | > ipython nbconvert mynotebook.ipynb --to pdf |
|
124 | 123 | |
|
125 | 124 | You can get (and serve) a Reveal.js-powered slideshow |
|
126 | 125 | |
|
127 | 126 | > ipython nbconvert myslides.ipynb --to slides --post serve |
|
128 | 127 | |
|
129 | 128 | Multiple notebooks can be given at the command line in a couple of |
|
130 | 129 | different ways: |
|
131 | 130 | |
|
132 | 131 | > ipython nbconvert notebook*.ipynb |
|
133 | 132 | > ipython nbconvert notebook1.ipynb notebook2.ipynb |
|
134 | 133 | |
|
135 | 134 | or you can specify the notebooks list in a config file, containing:: |
|
136 | 135 | |
|
137 | 136 | c.NbConvertApp.notebooks = ["my_notebook.ipynb"] |
|
138 | 137 | |
|
139 | 138 | > ipython nbconvert --config mycfg.py |
|
140 | 139 | """.format(get_export_names())) |
|
141 | 140 | |
|
142 | 141 | # Writer specific variables |
|
143 | 142 | writer = Instance('IPython.nbconvert.writers.base.WriterBase', |
|
144 | 143 | help="""Instance of the writer class used to write the |
|
145 | 144 | results of the conversion.""") |
|
146 | 145 | writer_class = DottedObjectName('FilesWriter', config=True, |
|
147 | 146 | help="""Writer class used to write the |
|
148 | 147 | results of the conversion""") |
|
149 | 148 | writer_aliases = {'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter', |
|
150 | 149 | 'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter', |
|
151 | 150 | 'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'} |
|
152 | 151 | writer_factory = Type() |
|
153 | 152 | |
|
154 | 153 | def _writer_class_changed(self, name, old, new): |
|
155 | 154 | if new.lower() in self.writer_aliases: |
|
156 | 155 | new = self.writer_aliases[new.lower()] |
|
157 | 156 | self.writer_factory = import_item(new) |
|
158 | 157 | |
|
159 | 158 | # Post-processor specific variables |
|
160 | 159 | postprocessor = Instance('IPython.nbconvert.postprocessors.base.PostProcessorBase', |
|
161 | 160 | help="""Instance of the PostProcessor class used to write the |
|
162 | 161 | results of the conversion.""") |
|
163 | 162 | |
|
164 | 163 | postprocessor_class = DottedOrNone(config=True, |
|
165 | 164 | help="""PostProcessor class used to write the |
|
166 | 165 | results of the conversion""") |
|
167 | 166 | postprocessor_aliases = {'serve': 'IPython.nbconvert.postprocessors.serve.ServePostProcessor'} |
|
168 | 167 | postprocessor_factory = Type() |
|
169 | 168 | |
|
170 | 169 | def _postprocessor_class_changed(self, name, old, new): |
|
171 | 170 | if new.lower() in self.postprocessor_aliases: |
|
172 | 171 | new = self.postprocessor_aliases[new.lower()] |
|
173 | 172 | if new: |
|
174 | 173 | self.postprocessor_factory = import_item(new) |
|
175 | 174 | |
|
176 | 175 | |
|
177 | 176 | # Other configurable variables |
|
178 | 177 | export_format = CaselessStrEnum(get_export_names(), |
|
179 | 178 | default_value="html", |
|
180 | 179 | config=True, |
|
181 | 180 | help="""The export format to be used.""" |
|
182 | 181 | ) |
|
183 | 182 | |
|
184 | 183 | notebooks = List([], config=True, help="""List of notebooks to convert. |
|
185 | 184 | Wildcards are supported. |
|
186 | 185 | Filenames passed positionally will be added to the list. |
|
187 | 186 | """) |
|
188 | 187 | |
|
189 | 188 | @catch_config_error |
|
190 | 189 | def initialize(self, argv=None): |
|
191 | 190 | self.init_syspath() |
|
192 | 191 | super(NbConvertApp, self).initialize(argv) |
|
193 | 192 | self.init_notebooks() |
|
194 | 193 | self.init_writer() |
|
195 | 194 | self.init_postprocessor() |
|
196 | 195 | |
|
197 | 196 | |
|
198 | 197 | |
|
199 | 198 | def init_syspath(self): |
|
200 | 199 | """ |
|
201 | 200 | Add the cwd to the sys.path ($PYTHONPATH) |
|
202 | 201 | """ |
|
203 | 202 | sys.path.insert(0, os.getcwd()) |
|
204 | 203 | |
|
205 | 204 | |
|
206 | 205 | def init_notebooks(self): |
|
207 | 206 | """Construct the list of notebooks. |
|
208 | 207 | If notebooks are passed on the command-line, |
|
209 | 208 | they override notebooks specified in config files. |
|
210 | 209 | Glob each notebook to replace notebook patterns with filenames. |
|
211 | 210 | """ |
|
212 | 211 | |
|
213 | 212 | # Specifying notebooks on the command-line overrides (rather than adds) |
|
214 | 213 | # the notebook list |
|
215 | 214 | if self.extra_args: |
|
216 | 215 | patterns = self.extra_args |
|
217 | 216 | else: |
|
218 | 217 | patterns = self.notebooks |
|
219 | 218 | |
|
220 | 219 | # Use glob to replace all the notebook patterns with filenames. |
|
221 | 220 | filenames = [] |
|
222 | 221 | for pattern in patterns: |
|
223 | 222 | |
|
224 | 223 | # Use glob to find matching filenames. Allow the user to convert |
|
225 | 224 | # notebooks without having to type the extension. |
|
226 | 225 | globbed_files = glob.glob(pattern) |
|
227 | 226 | globbed_files.extend(glob.glob(pattern + '.ipynb')) |
|
228 | 227 | if not globbed_files: |
|
229 | 228 | self.log.warn("pattern %r matched no files", pattern) |
|
230 | 229 | |
|
231 | 230 | for filename in globbed_files: |
|
232 | 231 | if filename not in filenames:
|
233 | 232 | filenames.append(filename) |
|
234 | 233 | self.notebooks = filenames |
|
235 | 234 | |
|
236 | 235 | def init_writer(self): |
|
237 | 236 | """ |
|
238 | 237 | Initialize the writer (which is stateless) |
|
239 | 238 | """ |
|
240 | 239 | self._writer_class_changed(None, self.writer_class, self.writer_class) |
|
241 | 240 | self.writer = self.writer_factory(parent=self) |
|
242 | 241 | |
|
243 | 242 | def init_postprocessor(self): |
|
244 | 243 | """ |
|
245 | 244 | Initialize the postprocessor (which is stateless) |
|
246 | 245 | """ |
|
247 | 246 | self._postprocessor_class_changed(None, self.postprocessor_class, |
|
248 | 247 | self.postprocessor_class) |
|
249 | 248 | if self.postprocessor_factory: |
|
250 | 249 | self.postprocessor = self.postprocessor_factory(parent=self) |
|
251 | 250 | |
|
252 | 251 | def start(self): |
|
253 | 252 | """ |
|
254 | 253 | Run after initialization has completed
|
255 | 254 | """ |
|
256 | 255 | super(NbConvertApp, self).start() |
|
257 | 256 | self.convert_notebooks() |
|
258 | 257 | |
|
259 | 258 | def convert_notebooks(self): |
|
260 | 259 | """ |
|
261 | 260 | Convert the notebooks in the self.notebook traitlet |
|
262 | 261 | """ |
|
263 | 262 | # Export each notebook |
|
264 | 263 | conversion_success = 0 |
|
265 | 264 | |
|
266 | 265 | if self.output_base != '' and len(self.notebooks) > 1: |
|
267 | 266 | self.log.error( |
|
268 | 267 | """UsageError: --output flag or `NbConvertApp.output_base` config option |
|
269 | 268 | cannot be used when converting multiple notebooks. |
|
270 | 269 | """) |
|
271 | 270 | self.exit(1) |
|
272 | 271 | |
|
273 | 272 | exporter = exporter_map[self.export_format](config=self.config) |
|
274 | 273 | |
|
275 | 274 | for notebook_filename in self.notebooks: |
|
276 | 275 | self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) |
|
277 | 276 | |
|
278 | 277 | # Get a unique key for the notebook and set it in the resources object. |
|
279 | 278 | basename = os.path.basename(notebook_filename) |
|
280 | 279 | notebook_name = basename[:basename.rfind('.')] |
|
281 | 280 | if self.output_base: |
|
282 | 281 | notebook_name = self.output_base |
|
283 | 282 | resources = {} |
|
284 | 283 | resources['unique_key'] = notebook_name |
|
285 | 284 | resources['output_files_dir'] = '%s_files' % notebook_name |
|
286 | 285 | self.log.info("Support files will be in %s", os.path.join(resources['output_files_dir'], '')) |
|
287 | 286 | |
|
288 | 287 | # Try to export |
|
289 | 288 | try: |
|
290 | 289 | output, resources = exporter.from_filename(notebook_filename, resources=resources) |
|
291 | 290 | except ConversionException as e: |
|
292 | 291 | self.log.error("Error while converting '%s'", notebook_filename, |
|
293 | 292 | exc_info=True) |
|
294 | 293 | self.exit(1) |
|
295 | 294 | else: |
|
296 | 295 | write_results = self.writer.write(output, resources, notebook_name=notebook_name)
|
297 | 296 | |
|
298 | 297 | #Post-process if post processor has been defined. |
|
299 | 298 | if hasattr(self, 'postprocessor') and self.postprocessor: |
|
300 | 299 | self.postprocessor(write_results)
|
301 | 300 | conversion_success += 1 |
|
302 | 301 | |
|
303 | 302 | # If nothing was converted successfully, help the user. |
|
304 | 303 | if conversion_success == 0: |
|
305 | 304 | self.print_help() |
|
306 | 305 | sys.exit(-1) |
|
307 | 306 | |
|
308 | 307 | #----------------------------------------------------------------------------- |
|
309 | 308 | # Main entry point |
|
310 | 309 | #----------------------------------------------------------------------------- |
|
311 | 310 | |
|
312 | 311 | launch_new_instance = NbConvertApp.launch_instance |
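
launch_new_instance is what the ipython nbconvert entry point calls, but the same application can be driven from Python. A hedged sketch; the notebook filename is an assumption.

    from IPython.nbconvert.nbconvertapp import NbConvertApp

    app = NbConvertApp()
    app.initialize(['--to', 'html', 'mynotebook.ipynb'])  # assumed filename
    app.start()   # runs convert_notebooks() and writes mynotebook.html
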
@@ -1,72 +1,70 b'' | |||
|
1 | 1 | """API for converting notebooks between versions. |
|
2 | 2 | |
|
3 | 3 | Authors: |
|
4 | 4 | |
|
5 | 5 | * Jonathan Frederic |
|
6 | 6 | """ |
|
7 | 7 | |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Copyright (C) 2013 The IPython Development Team |
|
10 | 10 | # |
|
11 | 11 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | 12 | # the file COPYING, distributed as part of this software. |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Imports |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | import re | |
|
20 | ||
|
21 | 19 | from .reader import get_version, versions |
|
22 | 20 | |
|
23 | 21 | #----------------------------------------------------------------------------- |
|
24 | 22 | # Functions |
|
25 | 23 | #----------------------------------------------------------------------------- |
|
26 | 24 | |
|
27 | 25 | def convert(nb, to_version): |
|
28 | 26 | """Convert a notebook node object to a specific version. Assumes that |
|
29 | 27 | all the versions starting from 1 to the latest major X are implemented. |
|
30 | 28 | In other words, there should never be a case where v1 v2 v3 v5 exist without |
|
31 | 29 | a v4. Also assumes that all conversions can be made in one step increments |
|
32 | 30 | between major versions and ignores minor revisions. |
|
33 | 31 | |
|
34 | 32 | Parameters |
|
35 | 33 | ---------- |
|
36 | 34 | nb : NotebookNode |
|
37 | 35 | to_version : int |
|
38 | 36 | Major revision to convert the notebook to. Can either be an upgrade or |
|
39 | 37 | a downgrade. |
|
40 | 38 | """ |
|
41 | 39 | |
|
42 | 40 | # Get input notebook version. |
|
43 | 41 | (version, version_minor) = get_version(nb) |
|
44 | 42 | |
|
45 | 43 | # Check if destination is current version, if so return contents |
|
46 | 44 | if version == to_version: |
|
47 | 45 | return nb |
|
48 | 46 | |
|
49 | 47 | # If the target version exists, try to convert to it one step at a time.
|
50 | 48 | elif to_version in versions: |
|
51 | 49 | |
|
52 | 50 | # Get the version that this recursion will convert to as a step
|
53 | 51 | # closer to the final revision. Make sure the newer of the conversion |
|
54 | 52 | # functions is used to perform the conversion. |
|
55 | 53 | if to_version > version: |
|
56 | 54 | step_version = version + 1 |
|
57 | 55 | convert_function = versions[step_version].upgrade |
|
58 | 56 | else: |
|
59 | 57 | step_version = version - 1 |
|
60 | 58 | convert_function = versions[version].downgrade |
|
61 | 59 | |
|
62 | 60 | # Convert and make sure version changed during conversion. |
|
63 | 61 | converted = convert_function(nb) |
|
64 | 62 | if converted.get('nbformat', 1) == version: |
|
65 | 63 | raise Exception("Cannot convert notebook from v%d to v%d. Operation" \ |
|
66 | 64 | "failed silently." % (version, step_version)) |
|
67 | 65 | |
|
68 | 66 | # Recursively convert until target version is reached. |
|
69 | 67 | return convert(converted, to_version) |
|
70 | 68 | else: |
|
71 | 69 | raise Exception("Cannot convert notebook to v%d because that " \ |
|
72 | 70 | "version doesn't exist" % (to_version)) |
@@ -1,81 +1,73 b'' | |||
|
1 | 1 | """Base config factories. |
|
2 | 2 | |
|
3 | 3 | Authors: |
|
4 | 4 | |
|
5 | 5 | * Min RK |
|
6 | 6 | """ |
|
7 | 7 | |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Copyright (C) 2010-2011 The IPython Development Team |
|
10 | 10 | # |
|
11 | 11 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | 12 | # the file COPYING, distributed as part of this software. |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Imports |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | ||
|
20 | import logging | |
|
21 | import os | |
|
22 | ||
|
23 | import zmq | |
|
24 | from zmq.eventloop.ioloop import IOLoop | |
|
25 | ||
|
26 | from IPython.config.configurable import Configurable | |
|
27 | 19 | from IPython.utils.localinterfaces import localhost |
|
28 | from IPython.utils.traitlets import Integer,

20 | from IPython.utils.traitlets import Integer, Unicode | |
|
29 | 21 | |
|
30 | 22 | from IPython.parallel.util import select_random_ports |
|
31 | from IPython.kernel.zmq.session import

23 | from IPython.kernel.zmq.session import SessionFactory | |
|
32 | 24 | |
|
33 | 25 | #----------------------------------------------------------------------------- |
|
34 | 26 | # Classes |
|
35 | 27 | #----------------------------------------------------------------------------- |
|
36 | 28 | |
|
37 | 29 | |
|
38 | 30 | class RegistrationFactory(SessionFactory): |
|
39 | 31 | """The Base Configurable for objects that involve registration.""" |
|
40 | 32 | |
|
41 | 33 | url = Unicode('', config=True, |
|
42 | 34 | help="""The 0MQ url used for registration. This sets transport, ip, and port |
|
43 | 35 | in one variable. For example: url='tcp://127.0.0.1:12345' or |
|
44 | 36 | url='epgm://*:90210'""" |
|
45 | 37 | ) # url takes precedence over ip,regport,transport |
|
46 | 38 | transport = Unicode('tcp', config=True, |
|
47 | 39 | help="""The 0MQ transport for communications. This will likely be |
|
48 | 40 | the default of 'tcp', but other values include 'ipc', 'epgm', 'inproc'.""") |
|
49 | 41 | ip = Unicode(config=True, |
|
50 | 42 | help="""The IP address for registration. This is generally either |
|
51 | 43 | '127.0.0.1' for loopback only or '*' for all interfaces. |
|
52 | 44 | """) |
|
53 | 45 | def _ip_default(self): |
|
54 | 46 | return localhost() |
|
55 | 47 | regport = Integer(config=True, |
|
56 | 48 | help="""The port on which the Hub listens for registration.""") |
|
57 | 49 | def _regport_default(self): |
|
58 | 50 | return select_random_ports(1)[0] |
|
59 | 51 | |
|
60 | 52 | def __init__(self, **kwargs): |
|
61 | 53 | super(RegistrationFactory, self).__init__(**kwargs) |
|
62 | 54 | self._propagate_url() |
|
63 | 55 | self._rebuild_url() |
|
64 | 56 | self.on_trait_change(self._propagate_url, 'url') |
|
65 | 57 | self.on_trait_change(self._rebuild_url, 'ip') |
|
66 | 58 | self.on_trait_change(self._rebuild_url, 'transport') |
|
67 | 59 | self.on_trait_change(self._rebuild_url, 'regport') |
|
68 | 60 | |
|
69 | 61 | def _rebuild_url(self): |
|
70 | 62 | self.url = "%s://%s:%i"%(self.transport, self.ip, self.regport) |
|
71 | 63 | |
|
72 | 64 | def _propagate_url(self): |
|
73 | 65 | """Ensure self.url contains full transport://interface:port""" |
|
74 | 66 | if self.url: |
|
75 | 67 | iface = self.url.split('://',1) |
|
76 | 68 | if len(iface) == 2: |
|
77 | 69 | self.transport,iface = iface |
|
78 | 70 | iface = iface.split(':') |
|
79 | 71 | self.ip = iface[0] |
|
80 | 72 | if iface[1]: |
|
81 | 73 | self.regport = int(iface[1]) |
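
A standalone sketch of the split/rebuild round trip that _propagate_url() and _rebuild_url() perform on the registration url (plain Python, no traitlets involved):

    url = 'tcp://127.0.0.1:12345'
    transport, iface = url.split('://', 1)
    ip, port = iface.split(':')
    assert (transport, ip, int(port)) == ('tcp', '127.0.0.1', 12345)
    assert "%s://%s:%i" % (transport, ip, int(port)) == url
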
@@ -1,390 +1,388 b'' | |||
|
1 | 1 | """Some generic utilities for dealing with classes, urls, and serialization.""" |
|
2 | 2 | |
|
3 | 3 | # Copyright (c) IPython Development Team. |
|
4 | 4 | # Distributed under the terms of the Modified BSD License. |
|
5 | 5 | |
|
6 | 6 | import logging |
|
7 | 7 | import os |
|
8 | 8 | import re |
|
9 | 9 | import stat |
|
10 | 10 | import socket |
|
11 | 11 | import sys |
|
12 | 12 | import warnings |
|
13 | 13 | from signal import signal, SIGINT, SIGABRT, SIGTERM |
|
14 | 14 | try: |
|
15 | 15 | from signal import SIGKILL |
|
16 | 16 | except ImportError: |
|
17 | 17 | SIGKILL=None |
|
18 | 18 | from types import FunctionType |
|
19 | 19 | |
|
20 | 20 | try: |
|
21 | 21 | import cPickle |
|
22 | 22 | pickle = cPickle |
|
23 | 23 | except: |
|
24 | 24 | cPickle = None |
|
25 | 25 | import pickle |
|
26 | 26 | |
|
27 | 27 | import zmq |
|
28 | 28 | from zmq.log import handlers |
|
29 | 29 | |
|
30 | 30 | from IPython.external.decorator import decorator |
|
31 | 31 | |
|
32 | 32 | from IPython.config.application import Application |
|
33 | 33 | from IPython.utils.localinterfaces import localhost, is_public_ip, public_ips |
|
34 | 34 | from IPython.utils.py3compat import string_types, iteritems, itervalues |
|
35 | 35 | from IPython.kernel.zmq.log import EnginePUBHandler |
|
36 | from IPython.kernel.zmq.serialize import ( | |
|
37 | unserialize_object, serialize_object, pack_apply_message, unpack_apply_message | |
|
38 | ) | |
|
36 | ||
|
39 | 37 | |
|
40 | 38 | #----------------------------------------------------------------------------- |
|
41 | 39 | # Classes |
|
42 | 40 | #----------------------------------------------------------------------------- |
|
43 | 41 | |
|
44 | 42 | class Namespace(dict): |
|
45 | 43 | """Subclass of dict for attribute access to keys.""" |
|
46 | 44 | |
|
47 | 45 | def __getattr__(self, key): |
|
48 | 46 | """getattr aliased to getitem""" |
|
49 | 47 | if key in self: |
|
50 | 48 | return self[key] |
|
51 | 49 | else: |
|
52 | 50 | raise NameError(key) |
|
53 | 51 | |
|
54 | 52 | def __setattr__(self, key, value): |
|
55 | 53 | """setattr aliased to setitem, with strict""" |
|
56 | 54 | if hasattr(dict, key): |
|
57 | 55 | raise KeyError("Cannot override dict keys %r"%key) |
|
58 | 56 | self[key] = value |
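
A minimal usage sketch for Namespace, assuming this module is importable as IPython.parallel.util (the path factory.py above imports select_random_ports from):

    from IPython.parallel.util import Namespace

    ns = Namespace(a=1)
    ns.b = 2                          # setattr aliased to setitem
    assert ns['b'] == 2 and ns.a == 1
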
|
59 | 57 | |
|
60 | 58 | |
|
61 | 59 | class ReverseDict(dict): |
|
62 | 60 | """simple double-keyed subset of dict methods.""" |
|
63 | 61 | |
|
64 | 62 | def __init__(self, *args, **kwargs): |
|
65 | 63 | dict.__init__(self, *args, **kwargs) |
|
66 | 64 | self._reverse = dict() |
|
67 | 65 | for key, value in iteritems(self): |
|
68 | 66 | self._reverse[value] = key |
|
69 | 67 | |
|
70 | 68 | def __getitem__(self, key): |
|
71 | 69 | try: |
|
72 | 70 | return dict.__getitem__(self, key) |
|
73 | 71 | except KeyError: |
|
74 | 72 | return self._reverse[key] |
|
75 | 73 | |
|
76 | 74 | def __setitem__(self, key, value): |
|
77 | 75 | if key in self._reverse: |
|
78 | 76 | raise KeyError("Can't have key %r on both sides!"%key) |
|
79 | 77 | dict.__setitem__(self, key, value) |
|
80 | 78 | self._reverse[value] = key |
|
81 | 79 | |
|
82 | 80 | def pop(self, key): |
|
83 | 81 | value = dict.pop(self, key) |
|
84 | 82 | self._reverse.pop(value) |
|
85 | 83 | return value |
|
86 | 84 | |
|
87 | 85 | def get(self, key, default=None): |
|
88 | 86 | try: |
|
89 | 87 | return self[key] |
|
90 | 88 | except KeyError: |
|
91 | 89 | return default |
|
92 | 90 | |
|
93 | 91 | #----------------------------------------------------------------------------- |
|
94 | 92 | # Functions |
|
95 | 93 | #----------------------------------------------------------------------------- |
|
96 | 94 | |
|
97 | 95 | @decorator |
|
98 | 96 | def log_errors(f, self, *args, **kwargs): |
|
99 | 97 | """decorator to log unhandled exceptions raised in a method. |
|
100 | 98 | |
|
101 | 99 | For use wrapping on_recv callbacks, so that exceptions |
|
102 | 100 | do not cause the stream to be closed. |
|
103 | 101 | """ |
|
104 | 102 | try: |
|
105 | 103 | return f(self, *args, **kwargs) |
|
106 | 104 | except Exception: |
|
107 | 105 | self.log.error("Uncaught exception in %r" % f, exc_info=True) |
|
108 | 106 | |
|
109 | 107 | |
|
110 | 108 | def is_url(url): |
|
111 | 109 | """boolean check for whether a string is a zmq url""" |
|
112 | 110 | if '://' not in url: |
|
113 | 111 | return False |
|
114 | 112 | proto, addr = url.split('://', 1) |
|
115 | 113 | if proto.lower() not in ['tcp','pgm','epgm','ipc','inproc']: |
|
116 | 114 | return False |
|
117 | 115 | return True |
|
118 | 116 | |
|
119 | 117 | def validate_url(url): |
|
120 | 118 | """validate a url for zeromq""" |
|
121 | 119 | if not isinstance(url, string_types): |
|
122 | 120 | raise TypeError("url must be a string, not %r"%type(url)) |
|
123 | 121 | url = url.lower() |
|
124 | 122 | |
|
125 | 123 | proto_addr = url.split('://') |
|
126 | 124 | assert len(proto_addr) == 2, 'Invalid url: %r'%url |
|
127 | 125 | proto, addr = proto_addr |
|
128 | 126 | assert proto in ['tcp','pgm','epgm','ipc','inproc'], "Invalid protocol: %r"%proto |
|
129 | 127 | |
|
130 | 128 | # domain pattern adapted from http://www.regexlib.com/REDetails.aspx?regexp_id=391 |
|
131 | 129 | # author: Remi Sabourin |
|
132 | 130 | pat = re.compile(r'^([\w\d]([\w\d\-]{0,61}[\w\d])?\.)*[\w\d]([\w\d\-]{0,61}[\w\d])?$') |
|
133 | 131 | |
|
134 | 132 | if proto == 'tcp': |
|
135 | 133 | lis = addr.split(':') |
|
136 | 134 | assert len(lis) == 2, 'Invalid url: %r'%url |
|
137 | 135 | addr,s_port = lis |
|
138 | 136 | try: |
|
139 | 137 | port = int(s_port) |
|
140 | 138 | except ValueError: |
|
141 | 139 | raise AssertionError("Invalid port %r in url: %r"%(s_port, url))
|
142 | 140 | |
|
143 | 141 | assert addr == '*' or pat.match(addr) is not None, 'Invalid url: %r'%url |
|
144 | 142 | |
|
145 | 143 | else: |
|
146 | 144 | # only validate tcp urls currently |
|
147 | 145 | pass |
|
148 | 146 | |
|
149 | 147 | return True |
|
150 | 148 | |
|
151 | 149 | |
|
152 | 150 | def validate_url_container(container): |
|
153 | 151 | """validate a potentially nested collection of urls.""" |
|
154 | 152 | if isinstance(container, string_types): |
|
155 | 153 | url = container |
|
156 | 154 | return validate_url(url) |
|
157 | 155 | elif isinstance(container, dict): |
|
158 | 156 | container = itervalues(container) |
|
159 | 157 | |
|
160 | 158 | for element in container: |
|
161 | 159 | validate_url_container(element) |
|
162 | 160 | |
|
163 | 161 | |
|
164 | 162 | def split_url(url): |
|
165 | 163 | """split a zmq url (tcp://ip:port) into ('tcp','ip','port').""" |
|
166 | 164 | proto_addr = url.split('://') |
|
167 | 165 | assert len(proto_addr) == 2, 'Invalid url: %r'%url |
|
168 | 166 | proto, addr = proto_addr |
|
169 | 167 | lis = addr.split(':') |
|
170 | 168 | assert len(lis) == 2, 'Invalid url: %r'%url |
|
171 | 169 | addr,s_port = lis |
|
172 | 170 | return proto,addr,s_port |
|
173 | 171 | |
|
174 | 172 | |
|
175 | 173 | def disambiguate_ip_address(ip, location=None): |
|
176 | 174 | """turn multi-ip interfaces '0.0.0.0' and '*' into a connectable address |
|
177 | 175 | |
|
178 | 176 | Explicit IP addresses are returned unmodified. |
|
179 | 177 | |
|
180 | 178 | Parameters |
|
181 | 179 | ---------- |
|
182 | 180 | |
|
183 | 181 | ip : IP address |
|
184 | 182 | An IP address, or the special values 0.0.0.0, or * |
|
185 | 183 | location: IP address, optional |
|
186 | 184 | A public IP of the target machine. |
|
187 | 185 | If location is an IP of the current machine, |
|
188 | 186 | localhost will be returned, |
|
189 | 187 | otherwise location will be returned. |
|
190 | 188 | """ |
|
191 | 189 | if ip in {'0.0.0.0', '*'}: |
|
192 | 190 | if not location: |
|
193 | 191 | # unspecified location, localhost is the only choice |
|
194 | 192 | ip = localhost() |
|
195 | 193 | elif is_public_ip(location): |
|
196 | 194 | # location is a public IP on this machine, use localhost |
|
197 | 195 | ip = localhost() |
|
198 | 196 | elif not public_ips(): |
|
199 | 197 | # this machine's public IPs cannot be determined, |
|
200 | 198 | # assume `location` is not this machine |
|
201 | 199 | warnings.warn("IPython could not determine public IPs", RuntimeWarning) |
|
202 | 200 | ip = location |
|
203 | 201 | else: |
|
204 | 202 | # location is not this machine, do not use loopback |
|
205 | 203 | ip = location |
|
206 | 204 | return ip |
|
207 | 205 | |
|
208 | 206 | |
|
209 | 207 | def disambiguate_url(url, location=None): |
|
210 | 208 | """turn multi-ip interfaces '0.0.0.0' and '*' into connectable |
|
211 | 209 | ones, based on the location (default interpretation is localhost). |
|
212 | 210 | |
|
213 | 211 | This is for zeromq urls, such as ``tcp://*:10101``. |
|
214 | 212 | """ |
|
215 | 213 | try: |
|
216 | 214 | proto,ip,port = split_url(url) |
|
217 | 215 | except AssertionError: |
|
218 | 216 | # probably not tcp url; could be ipc, etc. |
|
219 | 217 | return url |
|
220 | 218 | |
|
221 | 219 | ip = disambiguate_ip_address(ip,location) |
|
222 | 220 | |
|
223 | 221 | return "%s://%s:%s"%(proto,ip,port) |
|
224 | 222 | |
|
225 | 223 | |
|
226 | 224 | #-------------------------------------------------------------------------- |
|
227 | 225 | # helpers for implementing old MEC API via view.apply |
|
228 | 226 | #-------------------------------------------------------------------------- |
|
229 | 227 | |
|
230 | 228 | def interactive(f): |
|
231 | 229 | """decorator for making functions appear as interactively defined. |
|
232 | 230 | This results in the function being linked to the user_ns as globals() |
|
233 | 231 | instead of the module globals(). |
|
234 | 232 | """ |
|
235 | 233 | |
|
236 | 234 | # build new FunctionType, so it can have the right globals |
|
237 | 235 | # interactive functions never have closures, that's kind of the point |
|
238 | 236 | if isinstance(f, FunctionType): |
|
239 | 237 | mainmod = __import__('__main__') |
|
240 | 238 | f = FunctionType(f.__code__, mainmod.__dict__, |
|
241 | 239 | f.__name__, f.__defaults__, |
|
242 | 240 | ) |
|
243 | 241 | # associate with __main__ for uncanning |
|
244 | 242 | f.__module__ = '__main__' |
|
245 | 243 | return f |
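
A standalone illustration of the FunctionType trick @interactive relies on: rebuild a function so that its globals() is another module's namespace.

    from types import FunctionType

    def where_am_i():
        return globals()['__name__']

    mainmod = __import__('__main__')
    relocated = FunctionType(where_am_i.__code__, mainmod.__dict__,
                             where_am_i.__name__, where_am_i.__defaults__)
    print(relocated())   # '__main__', regardless of where where_am_i was defined
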
|
246 | 244 | |
|
247 | 245 | @interactive |
|
248 | 246 | def _push(**ns): |
|
249 | 247 | """helper method for implementing `client.push` via `client.apply`""" |
|
250 | 248 | user_ns = globals() |
|
251 | 249 | tmp = '_IP_PUSH_TMP_' |
|
252 | 250 | while tmp in user_ns: |
|
253 | 251 | tmp = tmp + '_' |
|
254 | 252 | try: |
|
255 | 253 | for name, value in ns.items(): |
|
256 | 254 | user_ns[tmp] = value |
|
257 | 255 | exec("%s = %s" % (name, tmp), user_ns) |
|
258 | 256 | finally: |
|
259 | 257 | user_ns.pop(tmp, None) |
|
260 | 258 | |
|
261 | 259 | @interactive |
|
262 | 260 | def _pull(keys): |
|
263 | 261 | """helper method for implementing `client.pull` via `client.apply`""" |
|
264 | 262 | if isinstance(keys, (list,tuple, set)): |
|
265 | 263 | return [eval(key, globals()) for key in keys] |
|
266 | 264 | else: |
|
267 | 265 | return eval(keys, globals()) |
|
268 | 266 | |
|
269 | 267 | @interactive |
|
270 | 268 | def _execute(code): |
|
271 | 269 | """helper method for implementing `client.execute` via `client.apply`""" |
|
272 | 270 | exec(code, globals()) |
|
273 | 271 | |
|
274 | 272 | #-------------------------------------------------------------------------- |
|
275 | 273 | # extra process management utilities |
|
276 | 274 | #-------------------------------------------------------------------------- |
|
277 | 275 | |
|
278 | 276 | _random_ports = set() |
|
279 | 277 | |
|
280 | 278 | def select_random_ports(n): |
|
281 | 279 | """Selects and return n random ports that are available.""" |
|
282 | 280 | ports = [] |
|
283 | 281 | for i in range(n): |
|
284 | 282 | sock = socket.socket() |
|
285 | 283 | sock.bind(('', 0)) |
|
286 | 284 | while sock.getsockname()[1] in _random_ports: |
|
287 | 285 | sock.close() |
|
288 | 286 | sock = socket.socket() |
|
289 | 287 | sock.bind(('', 0)) |
|
290 | 288 | ports.append(sock) |
|
291 | 289 | for i, sock in enumerate(ports): |
|
292 | 290 | port = sock.getsockname()[1] |
|
293 | 291 | sock.close() |
|
294 | 292 | ports[i] = port |
|
295 | 293 | _random_ports.add(port) |
|
296 | 294 | return ports |
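
The core trick select_random_ports() builds on, as a standalone sketch: binding to port 0 makes the OS pick a currently free port, which getsockname() then reveals.

    import socket

    sock = socket.socket()
    sock.bind(('', 0))
    free_port = sock.getsockname()[1]   # OS-assigned free port
    sock.close()
    print(free_port)
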
|
297 | 295 | |
|
298 | 296 | def signal_children(children): |
|
299 | 297 | """Relay interupt/term signals to children, for more solid process cleanup.""" |
|
300 | 298 | def terminate_children(sig, frame): |
|
301 | 299 | log = Application.instance().log |
|
302 | 300 | log.critical("Got signal %i, terminating children..."%sig) |
|
303 | 301 | for child in children: |
|
304 | 302 | child.terminate() |
|
305 | 303 | |
|
306 | 304 | sys.exit(sig != SIGINT) |
|
307 | 305 | # sys.exit(sig) |
|
308 | 306 | for sig in (SIGINT, SIGABRT, SIGTERM): |
|
309 | 307 | signal(sig, terminate_children) |
|
310 | 308 | |
|
311 | 309 | def generate_exec_key(keyfile): |
|
312 | 310 | import uuid |
|
313 | 311 | newkey = str(uuid.uuid4()) |
|
314 | 312 | with open(keyfile, 'w') as f: |
|
315 | 313 | # f.write('ipython-key ') |
|
316 | 314 | f.write(newkey+'\n') |
|
317 | 315 | # set user-only RW permissions (0600) |
|
318 | 316 | # this will have no effect on Windows |
|
319 | 317 | os.chmod(keyfile, stat.S_IRUSR|stat.S_IWUSR) |
|
320 | 318 | |
|
321 | 319 | |
|
322 | 320 | def integer_loglevel(loglevel): |
|
323 | 321 | try: |
|
324 | 322 | loglevel = int(loglevel) |
|
325 | 323 | except ValueError: |
|
326 | 324 | if isinstance(loglevel, str): |
|
327 | 325 | loglevel = getattr(logging, loglevel) |
|
328 | 326 | return loglevel |
|
329 | 327 | |
|
330 | 328 | def connect_logger(logname, context, iface, root="ip", loglevel=logging.DEBUG): |
|
331 | 329 | logger = logging.getLogger(logname) |
|
332 | 330 | if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]): |
|
333 | 331 | # don't add a second PUBHandler |
|
334 | 332 | return |
|
335 | 333 | loglevel = integer_loglevel(loglevel) |
|
336 | 334 | lsock = context.socket(zmq.PUB) |
|
337 | 335 | lsock.connect(iface) |
|
338 | 336 | handler = handlers.PUBHandler(lsock) |
|
339 | 337 | handler.setLevel(loglevel) |
|
340 | 338 | handler.root_topic = root |
|
341 | 339 | logger.addHandler(handler) |
|
342 | 340 | logger.setLevel(loglevel) |
|
343 | 341 | |
|
344 | 342 | def connect_engine_logger(context, iface, engine, loglevel=logging.DEBUG): |
|
345 | 343 | logger = logging.getLogger() |
|
346 | 344 | if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]): |
|
347 | 345 | # don't add a second PUBHandler |
|
348 | 346 | return |
|
349 | 347 | loglevel = integer_loglevel(loglevel) |
|
350 | 348 | lsock = context.socket(zmq.PUB) |
|
351 | 349 | lsock.connect(iface) |
|
352 | 350 | handler = EnginePUBHandler(engine, lsock) |
|
353 | 351 | handler.setLevel(loglevel) |
|
354 | 352 | logger.addHandler(handler) |
|
355 | 353 | logger.setLevel(loglevel) |
|
356 | 354 | return logger |
|
357 | 355 | |
|
358 | 356 | def local_logger(logname, loglevel=logging.DEBUG): |
|
359 | 357 | loglevel = integer_loglevel(loglevel) |
|
360 | 358 | logger = logging.getLogger(logname) |
|
361 | 359 | if any([isinstance(h, logging.StreamHandler) for h in logger.handlers]): |
|
362 | 360 | # don't add a second StreamHandler |
|
363 | 361 | return |
|
364 | 362 | handler = logging.StreamHandler() |
|
365 | 363 | handler.setLevel(loglevel) |
|
366 | 364 | formatter = logging.Formatter("%(asctime)s.%(msecs).03d [%(name)s] %(message)s", |
|
367 | 365 | datefmt="%Y-%m-%d %H:%M:%S") |
|
368 | 366 | handler.setFormatter(formatter) |
|
369 | 367 | |
|
370 | 368 | logger.addHandler(handler) |
|
371 | 369 | logger.setLevel(loglevel) |
|
372 | 370 | return logger |
|
373 | 371 | |
|
374 | 372 | def set_hwm(sock, hwm=0): |
|
375 | 373 | """set zmq High Water Mark on a socket |
|
376 | 374 | |
|
377 | 375 | in a way that always works for various pyzmq / libzmq versions. |
|
378 | 376 | """ |
|
379 | 377 | import zmq |
|
380 | 378 | |
|
381 | 379 | for key in ('HWM', 'SNDHWM', 'RCVHWM'): |
|
382 | 380 | opt = getattr(zmq, key, None) |
|
383 | 381 | if opt is None: |
|
384 | 382 | continue |
|
385 | 383 | try: |
|
386 | 384 | sock.setsockopt(opt, hwm) |
|
387 | 385 | except zmq.ZMQError: |
|
388 | 386 | pass |
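
A hedged usage sketch for set_hwm(), assuming pyzmq is installed and this module is importable as IPython.parallel.util:

    import zmq
    from IPython.parallel.util import set_hwm

    ctx = zmq.Context()
    sock = ctx.socket(zmq.PUB)
    set_hwm(sock, 1000)   # sets whichever of HWM/SNDHWM/RCVHWM this zmq exposes
    sock.close()
    ctx.term()
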
|
389 | 387 | |
|
390 | No newline at end of file | |
|
388 |
@@ -1,395 +1,393 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # encoding: utf-8 |
|
3 | 3 | """ |
|
4 | 4 | The :class:`~IPython.core.application.Application` object for the command |
|
5 | 5 | line :command:`ipython` program. |
|
6 | 6 | |
|
7 | 7 | Authors |
|
8 | 8 | ------- |
|
9 | 9 | |
|
10 | 10 | * Brian Granger |
|
11 | 11 | * Fernando Perez |
|
12 | 12 | * Min Ragan-Kelley |
|
13 | 13 | """ |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Copyright (C) 2008-2011 The IPython Development Team |
|
17 | 17 | # |
|
18 | 18 | # Distributed under the terms of the BSD License. The full license is in |
|
19 | 19 | # the file COPYING, distributed as part of this software. |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | # Imports |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | |
|
26 | 26 | from __future__ import absolute_import |
|
27 | 27 | from __future__ import print_function |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | import os |
|
31 | 31 | import sys |
|
32 | 32 | |
|
33 | from IPython.config.loader import (

34 | Config, PyFileConfigLoader, ConfigFileNotFound | |
|
35 | ) | |
|
33 | from IPython.config.loader import Config | |
|
36 | 34 | from IPython.config.application import boolean_flag, catch_config_error, Application |
|
37 | 35 | from IPython.core import release |
|
38 | 36 | from IPython.core import usage |
|
39 | 37 | from IPython.core.completer import IPCompleter |
|
40 | 38 | from IPython.core.crashhandler import CrashHandler |
|
41 | 39 | from IPython.core.formatters import PlainTextFormatter |
|
42 | 40 | from IPython.core.history import HistoryManager |
|
43 | 41 | from IPython.core.prompts import PromptManager |
|
44 | 42 | from IPython.core.application import ( |
|
45 | 43 | ProfileDir, BaseIPythonApplication, base_flags, base_aliases |
|
46 | 44 | ) |
|
47 | 45 | from IPython.core.magics import ScriptMagics |
|
48 | 46 | from IPython.core.shellapp import ( |
|
49 | 47 | InteractiveShellApp, shell_flags, shell_aliases |
|
50 | 48 | ) |
|
51 | 49 | from IPython.extensions.storemagic import StoreMagics |
|
52 | 50 | from IPython.terminal.interactiveshell import TerminalInteractiveShell |
|
53 | 51 | from IPython.utils import warn |
|
54 | 52 | from IPython.utils.path import get_ipython_dir, check_for_old_config |
|
55 | 53 | from IPython.utils.traitlets import ( |
|
56 | 54 | Bool, List, Dict, |
|
57 | 55 | ) |
|
58 | 56 | |
|
59 | 57 | #----------------------------------------------------------------------------- |
|
60 | 58 | # Globals, utilities and helpers |
|
61 | 59 | #----------------------------------------------------------------------------- |
|
62 | 60 | |
|
63 | 61 | _examples = """ |
|
64 | 62 | ipython --matplotlib # enable matplotlib integration |
|
65 | 63 | ipython --matplotlib=qt # enable matplotlib integration with qt4 backend |
|
66 | 64 | |
|
67 | 65 | ipython --log-level=DEBUG # set logging to DEBUG |
|
68 | 66 | ipython --profile=foo # start with profile foo |
|
69 | 67 | |
|
70 | 68 | ipython qtconsole # start the qtconsole GUI application |
|
71 | 69 | ipython help qtconsole # show the help for the qtconsole subcmd |
|
72 | 70 | |
|
73 | 71 | ipython console # start the terminal-based console application |
|
74 | 72 | ipython help console # show the help for the console subcmd |
|
75 | 73 | |
|
76 | 74 | ipython notebook # start the IPython notebook |
|
77 | 75 | ipython help notebook # show the help for the notebook subcmd |
|
78 | 76 | |
|
79 | 77 | ipython profile create foo # create profile foo w/ default config files |
|
80 | 78 | ipython help profile # show the help for the profile subcmd |
|
81 | 79 | |
|
82 | 80 | ipython locate # print the path to the IPython directory |
|
83 | 81 | ipython locate profile foo # print the path to the directory for profile `foo` |
|
84 | 82 | |
|
85 | 83 | ipython nbconvert # convert notebooks to/from other formats |
|
86 | 84 | """ |
|
87 | 85 | |
|
88 | 86 | #----------------------------------------------------------------------------- |
|
89 | 87 | # Crash handler for this application |
|
90 | 88 | #----------------------------------------------------------------------------- |
|
91 | 89 | |
|
92 | 90 | class IPAppCrashHandler(CrashHandler): |
|
93 | 91 | """sys.excepthook for IPython itself, leaves a detailed report on disk.""" |
|
94 | 92 | |
|
95 | 93 | def __init__(self, app): |
|
96 | 94 | contact_name = release.author |
|
97 | 95 | contact_email = release.author_email |
|
98 | 96 | bug_tracker = 'https://github.com/ipython/ipython/issues' |
|
99 | 97 | super(IPAppCrashHandler,self).__init__( |
|
100 | 98 | app, contact_name, contact_email, bug_tracker |
|
101 | 99 | ) |
|
102 | 100 | |
|
103 | 101 | def make_report(self,traceback): |
|
104 | 102 | """Return a string containing a crash report.""" |
|
105 | 103 | |
|
106 | 104 | sec_sep = self.section_sep |
|
107 | 105 | # Start with parent report |
|
108 | 106 | report = [super(IPAppCrashHandler, self).make_report(traceback)] |
|
109 | 107 | # Add interactive-specific info we may have |
|
110 | 108 | rpt_add = report.append |
|
111 | 109 | try: |
|
112 | 110 | rpt_add(sec_sep+"History of session input:") |
|
113 | 111 | for line in self.app.shell.user_ns['_ih']: |
|
114 | 112 | rpt_add(line) |
|
115 | 113 | rpt_add('\n*** Last line of input (may not be in above history):\n') |
|
116 | 114 | rpt_add(self.app.shell._last_input_line+'\n') |
|
117 | 115 | except: |
|
118 | 116 | pass |
|
119 | 117 | |
|
120 | 118 | return ''.join(report) |
|
121 | 119 | |
|
122 | 120 | #----------------------------------------------------------------------------- |
|
123 | 121 | # Aliases and Flags |
|
124 | 122 | #----------------------------------------------------------------------------- |
|
125 | 123 | flags = dict(base_flags) |
|
126 | 124 | flags.update(shell_flags) |
|
127 | 125 | frontend_flags = {} |
|
128 | 126 | addflag = lambda *args: frontend_flags.update(boolean_flag(*args)) |
|
129 | 127 | addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax', |
|
130 | 128 | 'Turn on auto editing of files with syntax errors.', |
|
131 | 129 | 'Turn off auto editing of files with syntax errors.' |
|
132 | 130 | ) |
|
133 | 131 | addflag('banner', 'TerminalIPythonApp.display_banner', |
|
134 | 132 | "Display a banner upon starting IPython.", |
|
135 | 133 | "Don't display a banner upon starting IPython." |
|
136 | 134 | ) |
|
137 | 135 | addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit', |
|
138 | 136 | """Set to confirm when you try to exit IPython with an EOF (Control-D |
|
139 | 137 | in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', |
|
140 | 138 | you can force a direct exit without any confirmation.""", |
|
141 | 139 | "Don't prompt the user when exiting." |
|
142 | 140 | ) |
|
143 | 141 | addflag('term-title', 'TerminalInteractiveShell.term_title', |
|
144 | 142 | "Enable auto setting the terminal title.", |
|
145 | 143 | "Disable auto setting the terminal title." |
|
146 | 144 | ) |
|
147 | 145 | classic_config = Config() |
|
148 | 146 | classic_config.InteractiveShell.cache_size = 0 |
|
149 | 147 | classic_config.PlainTextFormatter.pprint = False |
|
150 | 148 | classic_config.PromptManager.in_template = '>>> ' |
|
151 | 149 | classic_config.PromptManager.in2_template = '... ' |
|
152 | 150 | classic_config.PromptManager.out_template = '' |
|
153 | 151 | classic_config.InteractiveShell.separate_in = '' |
|
154 | 152 | classic_config.InteractiveShell.separate_out = '' |
|
155 | 153 | classic_config.InteractiveShell.separate_out2 = '' |
|
156 | 154 | classic_config.InteractiveShell.colors = 'NoColor' |
|
157 | 155 | classic_config.InteractiveShell.xmode = 'Plain' |
|
158 | 156 | |
|
159 | 157 | frontend_flags['classic']=( |
|
160 | 158 | classic_config, |
|
161 | 159 | "Gives IPython a similar feel to the classic Python prompt." |
|
162 | 160 | ) |
|
163 | 161 | # # log doesn't make so much sense this way anymore |
|
164 | 162 | # paa('--log','-l', |
|
165 | 163 | # action='store_true', dest='InteractiveShell.logstart', |
|
166 | 164 | # help="Start logging to the default log file (./ipython_log.py).") |
|
167 | 165 | # |
|
168 | 166 | # # quick is harder to implement |
|
169 | 167 | frontend_flags['quick']=( |
|
170 | 168 | {'TerminalIPythonApp' : {'quick' : True}}, |
|
171 | 169 | "Enable quick startup with no config files." |
|
172 | 170 | ) |
|
173 | 171 | |
|
174 | 172 | frontend_flags['i'] = ( |
|
175 | 173 | {'TerminalIPythonApp' : {'force_interact' : True}}, |
|
176 | 174 | """If running code from the command line, become interactive afterwards. |
|
177 | 175 | Note: can also be given simply as '-i'.""" |
|
178 | 176 | ) |
|
179 | 177 | flags.update(frontend_flags) |
|
180 | 178 | |
|
181 | 179 | aliases = dict(base_aliases) |
|
182 | 180 | aliases.update(shell_aliases) |
|
183 | 181 | |
|
184 | 182 | #----------------------------------------------------------------------------- |
|
185 | 183 | # Main classes and functions |
|
186 | 184 | #----------------------------------------------------------------------------- |
|
187 | 185 | |
|
188 | 186 | |
|
189 | 187 | class LocateIPythonApp(BaseIPythonApplication): |
|
190 | 188 | description = """print the path to the IPython dir""" |
|
191 | 189 | subcommands = Dict(dict( |
|
192 | 190 | profile=('IPython.core.profileapp.ProfileLocate', |
|
193 | 191 | "print the path to an IPython profile directory", |
|
194 | 192 | ), |
|
195 | 193 | )) |
|
196 | 194 | def start(self): |
|
197 | 195 | if self.subapp is not None: |
|
198 | 196 | return self.subapp.start() |
|
199 | 197 | else: |
|
200 | 198 | print(self.ipython_dir) |
|
201 | 199 | |
|
202 | 200 | |
|
203 | 201 | class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): |
|
204 | 202 | name = u'ipython' |
|
205 | 203 | description = usage.cl_usage |
|
206 | 204 | crash_handler_class = IPAppCrashHandler |
|
207 | 205 | examples = _examples |
|
208 | 206 | |
|
209 | 207 | flags = Dict(flags) |
|
210 | 208 | aliases = Dict(aliases) |
|
211 | 209 | classes = List() |
|
212 | 210 | def _classes_default(self): |
|
213 | 211 | """This has to be in a method, for TerminalIPythonApp to be available.""" |
|
214 | 212 | return [ |
|
215 | 213 | InteractiveShellApp, # ShellApp comes before TerminalApp, because |
|
216 | 214 | self.__class__, # it will also affect subclasses (e.g. QtConsole) |
|
217 | 215 | TerminalInteractiveShell, |
|
218 | 216 | PromptManager, |
|
219 | 217 | HistoryManager, |
|
220 | 218 | ProfileDir, |
|
221 | 219 | PlainTextFormatter, |
|
222 | 220 | IPCompleter, |
|
223 | 221 | ScriptMagics, |
|
224 | 222 | StoreMagics, |
|
225 | 223 | ] |
|
226 | 224 | |
|
227 | 225 | subcommands = dict( |
|
228 | 226 | qtconsole=('IPython.qt.console.qtconsoleapp.IPythonQtConsoleApp', |
|
229 | 227 | """Launch the IPython Qt Console.""" |
|
230 | 228 | ), |
|
231 | 229 | notebook=('IPython.html.notebookapp.NotebookApp', |
|
232 | 230 | """Launch the IPython HTML Notebook Server.""" |
|
233 | 231 | ), |
|
234 | 232 | profile = ("IPython.core.profileapp.ProfileApp", |
|
235 | 233 | "Create and manage IPython profiles." |
|
236 | 234 | ), |
|
237 | 235 | kernel = ("IPython.kernel.zmq.kernelapp.IPKernelApp", |
|
238 | 236 | "Start a kernel without an attached frontend." |
|
239 | 237 | ), |
|
240 | 238 | console=('IPython.terminal.console.app.ZMQTerminalIPythonApp', |
|
241 | 239 | """Launch the IPython terminal-based Console.""" |
|
242 | 240 | ), |
|
243 | 241 | locate=('IPython.terminal.ipapp.LocateIPythonApp', |
|
244 | 242 | LocateIPythonApp.description |
|
245 | 243 | ), |
|
246 | 244 | history=('IPython.core.historyapp.HistoryApp', |
|
247 | 245 | "Manage the IPython history database." |
|
248 | 246 | ), |
|
249 | 247 | nbconvert=('IPython.nbconvert.nbconvertapp.NbConvertApp', |
|
250 | 248 | "Convert notebooks to/from other formats." |
|
251 | 249 | ), |
|
252 | 250 | trust=('IPython.nbformat.sign.TrustNotebookApp', |
|
253 | 251 | "Sign notebooks to trust their potentially unsafe contents at load." |
|
254 | 252 | ), |
|
255 | 253 | ) |
|
256 | 254 | subcommands['install-nbextension'] = ( |
|
257 | 255 | "IPython.html.nbextensions.NBExtensionApp", |
|
258 | 256 | "Install IPython notebook extension files" |
|
259 | 257 | ) |
|
260 | 258 | |
|
261 | 259 | # *do* autocreate requested profile, but don't create the config file. |
|
262 | 260 | auto_create=Bool(True) |
|
263 | 261 | # configurables |
|
264 | 262 | ignore_old_config=Bool(False, config=True, |
|
265 | 263 | help="Suppress warning messages about legacy config files" |
|
266 | 264 | ) |
|
267 | 265 | quick = Bool(False, config=True, |
|
268 | 266 | help="""Start IPython quickly by skipping the loading of config files.""" |
|
269 | 267 | ) |
|
270 | 268 | def _quick_changed(self, name, old, new): |
|
271 | 269 | if new: |
|
272 | 270 | self.load_config_file = lambda *a, **kw: None |
|
273 | 271 | self.ignore_old_config=True |
|
274 | 272 | |
|
275 | 273 | display_banner = Bool(True, config=True, |
|
276 | 274 | help="Whether to display a banner upon starting IPython." |
|
277 | 275 | ) |
|
278 | 276 | |
|
279 | 277 | # if there is code or files to run from the cmd line, don't interact
|
280 | 278 | # unless the --i flag (App.force_interact) is true. |
|
281 | 279 | force_interact = Bool(False, config=True, |
|
282 | 280 | help="""If a command or file is given via the command-line, |
|
283 | 281 | e.g. 'ipython foo.py', start an interactive shell after executing the |
|
284 | 282 | file or command.""" |
|
285 | 283 | ) |
|
286 | 284 | def _force_interact_changed(self, name, old, new): |
|
287 | 285 | if new: |
|
288 | 286 | self.interact = True |
|
289 | 287 | |
|
290 | 288 | def _file_to_run_changed(self, name, old, new): |
|
291 | 289 | if new: |
|
292 | 290 | self.something_to_run = True |
|
293 | 291 | if new and not self.force_interact: |
|
294 | 292 | self.interact = False |
|
295 | 293 | _code_to_run_changed = _file_to_run_changed |
|
296 | 294 | _module_to_run_changed = _file_to_run_changed |
|
297 | 295 | |
|
298 | 296 | # internal, not-configurable |
|
299 | 297 | interact=Bool(True) |
|
300 | 298 | something_to_run=Bool(False) |
|
301 | 299 | |
|
302 | 300 | def parse_command_line(self, argv=None): |
|
303 | 301 | """override to allow old '-pylab' flag with deprecation warning""" |
|
304 | 302 | |
|
305 | 303 | argv = sys.argv[1:] if argv is None else argv |
|
306 | 304 | |
|
307 | 305 | if '-pylab' in argv: |
|
308 | 306 | # deprecated `-pylab` given, |
|
309 | 307 | # warn and transform into current syntax |
|
310 | 308 | argv = argv[:] # copy, don't clobber |
|
311 | 309 | idx = argv.index('-pylab') |
|
312 | 310 | warn.warn("`-pylab` flag has been deprecated.\n" |
|
313 | 311 | " Use `--matplotlib <backend>` and import pylab manually.") |
|
314 | 312 | argv[idx] = '--pylab' |
|
315 | 313 | |
|
316 | 314 | return super(TerminalIPythonApp, self).parse_command_line(argv) |
|
317 | 315 | |
|
318 | 316 | @catch_config_error |
|
319 | 317 | def initialize(self, argv=None): |
|
320 | 318 | """Do actions after construct, but before starting the app.""" |
|
321 | 319 | super(TerminalIPythonApp, self).initialize(argv) |
|
322 | 320 | if self.subapp is not None: |
|
323 | 321 | # don't bother initializing further, starting subapp |
|
324 | 322 | return |
|
325 | 323 | if not self.ignore_old_config: |
|
326 | 324 | check_for_old_config(self.ipython_dir) |
|
327 | 325 | # print self.extra_args |
|
328 | 326 | if self.extra_args and not self.something_to_run: |
|
329 | 327 | self.file_to_run = self.extra_args[0] |
|
330 | 328 | self.init_path() |
|
331 | 329 | # create the shell |
|
332 | 330 | self.init_shell() |
|
333 | 331 | # and draw the banner |
|
334 | 332 | self.init_banner() |
|
335 | 333 | # Now a variety of things that happen after the banner is printed. |
|
336 | 334 | self.init_gui_pylab() |
|
337 | 335 | self.init_extensions() |
|
338 | 336 | self.init_code() |
|
339 | 337 | |
|
340 | 338 | def init_shell(self): |
|
341 | 339 | """initialize the InteractiveShell instance""" |
|
342 | 340 | # Create an InteractiveShell instance. |
|
343 | 341 | # shell.display_banner should always be False for the terminal |
|
344 | 342 | # based app, because we call shell.show_banner() by hand below |
|
345 | 343 | # so the banner shows *before* all extension loading stuff. |
|
346 | 344 | self.shell = TerminalInteractiveShell.instance(parent=self, |
|
347 | 345 | display_banner=False, profile_dir=self.profile_dir, |
|
348 | 346 | ipython_dir=self.ipython_dir, user_ns=self.user_ns) |
|
349 | 347 | self.shell.configurables.append(self) |
|
350 | 348 | |
|
351 | 349 | def init_banner(self): |
|
352 | 350 | """optionally display the banner""" |
|
353 | 351 | if self.display_banner and self.interact: |
|
354 | 352 | self.shell.show_banner() |
|
355 | 353 | # Make sure there is a space below the banner. |
|
356 | 354 | if self.log_level <= logging.INFO: print() |
|
357 | 355 | |
|
358 | 356 | def _pylab_changed(self, name, old, new): |
|
359 | 357 | """Replace --pylab='inline' with --pylab='auto'""" |
|
360 | 358 | if new == 'inline': |
|
361 | 359 | warn.warn("'inline' not available as pylab backend, " |
|
362 | 360 | "using 'auto' instead.") |
|
363 | 361 | self.pylab = 'auto' |
|
364 | 362 | |
|
365 | 363 | def start(self): |
|
366 | 364 | if self.subapp is not None: |
|
367 | 365 | return self.subapp.start() |
|
368 | 366 | # perform any pre-exec steps:
|
369 | 367 | if self.interact: |
|
370 | 368 | self.log.debug("Starting IPython's mainloop...") |
|
371 | 369 | self.shell.mainloop() |
|
372 | 370 | else: |
|
373 | 371 | self.log.debug("IPython not interactive...") |
|
374 | 372 | |
|
375 | 373 | def load_default_config(ipython_dir=None): |
|
376 | 374 | """Load the default config file from the default ipython_dir. |
|
377 | 375 | |
|
378 | 376 | This is useful for embedded shells. |
|
379 | 377 | """ |
|
380 | 378 | if ipython_dir is None: |
|
381 | 379 | ipython_dir = get_ipython_dir() |
|
382 | 380 | |
|
383 | 381 | profile_dir = os.path.join(ipython_dir, 'profile_default') |
|
384 | 382 | |
|
385 | 383 | config = Config() |
|
386 | 384 | for cf in Application._load_config_files("ipython_config", path=profile_dir): |
|
387 | 385 | config.update(cf) |
|
388 | 386 | |
|
389 | 387 | return config |
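
A hedged sketch of the embedded-shell use the docstring mentions; it assumes a default profile exists and that IPython.embed accepts a config argument (the call itself is left commented out):

    from IPython import embed
    from IPython.terminal.ipapp import load_default_config

    config = load_default_config()
    # embed(config=config)   # drop into IPython here with the default profile's config
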
|
390 | 388 | |
|
391 | 389 | launch_new_instance = TerminalIPythonApp.launch_instance |
|
392 | 390 | |
|
393 | 391 | |
|
394 | 392 | if __name__ == '__main__': |
|
395 | 393 | launch_new_instance() |
@@ -1,325 +1,325 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | |
|
3 | 3 | """ PickleShare - a small 'shelve' like datastore with concurrency support |
|
4 | 4 | |
|
5 | 5 | Like shelve, a PickleShareDB object acts like a normal dictionary. Unlike |
|
6 | 6 | shelve, many processes can access the database simultaneously. Changing a |
|
7 | 7 | value in database is immediately visible to other processes accessing the |
|
8 | 8 | same database. |
|
9 | 9 | |
|
10 | 10 | Concurrency is possible because the values are stored in separate files. Hence |
|
11 | 11 | the "database" is a directory where *all* files are governed by PickleShare. |
|
12 | 12 | |
|
13 | 13 | Example usage:: |
|
14 | 14 | |
|
15 | 15 | from pickleshare import * |
|
16 | 16 | db = PickleShareDB('~/testpickleshare') |
|
17 | 17 | db.clear() |
|
18 | 18 | print "Should be empty:",db.items() |
|
19 | 19 | db['hello'] = 15 |
|
20 | 20 | db['aku ankka'] = [1,2,313] |
|
21 | 21 | db['paths/are/ok/key'] = [1,(5,46)] |
|
22 | 22 | print(db.keys())
|
23 | 23 | del db['aku ankka'] |
|
24 | 24 | |
|
25 | 25 | This module is certainly not ZODB, but can be used for low-load |
|
26 | 26 | (non-mission-critical) situations where tiny code size trumps the |
|
27 | 27 | advanced features of a "real" object database. |
|
28 | 28 | |
|
29 | 29 | Installation guide: easy_install pickleshare |
|
30 | 30 | |
|
31 | 31 | Author: Ville Vainio <vivainio@gmail.com> |
|
32 | 32 | License: MIT open source license. |
|
33 | 33 | |
|
34 | 34 | """ |
|
35 | 35 | from __future__ import print_function |
|
36 | 36 | |
|
37 | 37 | from IPython.external.path import path as Path |
|
38 | import

38 | import stat, time | |
|
39 | 39 | import collections |
|
40 | 40 | try: |
|
41 | 41 | import cPickle as pickle |
|
42 | 42 | except ImportError: |
|
43 | 43 | import pickle |
|
44 | 44 | import glob |
|
45 | 45 | |
|
46 | 46 | def gethashfile(key): |
|
47 | 47 | return ("%02x" % abs(hash(key) % 256))[-2:] |
|
48 | 48 | |
|
49 | 49 | _sentinel = object() |
|
50 | 50 | |
|
51 | 51 | class PickleShareDB(collections.MutableMapping): |
|
52 | 52 | """ The main 'connection' object for PickleShare database """ |
|
53 | 53 | def __init__(self,root): |
|
54 | 54 | """ Return a db object that will manage the specied directory""" |
|
55 | 55 | self.root = Path(root).expanduser().abspath() |
|
56 | 56 | if not self.root.isdir(): |
|
57 | 57 | self.root.makedirs() |
|
58 | 58 | # cache has { 'key' : (obj, orig_mod_time) } |
|
59 | 59 | self.cache = {} |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def __getitem__(self,key): |
|
63 | 63 | """ db['key'] reading """ |
|
64 | 64 | fil = self.root / key |
|
65 | 65 | try: |
|
66 | 66 | mtime = (fil.stat()[stat.ST_MTIME]) |
|
67 | 67 | except OSError: |
|
68 | 68 | raise KeyError(key) |
|
69 | 69 | |
|
70 | 70 | if fil in self.cache and mtime == self.cache[fil][1]: |
|
71 | 71 | return self.cache[fil][0] |
|
72 | 72 | try: |
|
73 | 73 | # The cached item has expired, need to read |
|
74 | 74 | with fil.open("rb") as f: |
|
75 | 75 | obj = pickle.loads(f.read()) |
|
76 | 76 | except: |
|
77 | 77 | raise KeyError(key) |
|
78 | 78 | |
|
79 | 79 | self.cache[fil] = (obj,mtime) |
|
80 | 80 | return obj |
|
81 | 81 | |
|
82 | 82 | def __setitem__(self,key,value): |
|
83 | 83 | """ db['key'] = 5 """ |
|
84 | 84 | fil = self.root / key |
|
85 | 85 | parent = fil.parent |
|
86 | 86 | if parent and not parent.isdir(): |
|
87 | 87 | parent.makedirs() |
|
88 | 88 | # We specify protocol 2, so that we can mostly go between Python 2 |
|
89 | 89 | # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete. |
|
90 | 90 | with fil.open('wb') as f: |
|
91 | 91 | pickle.dump(value, f, protocol=2)
|
92 | 92 | try: |
|
93 | 93 | self.cache[fil] = (value,fil.mtime) |
|
94 | 94 | except OSError as e: |
|
95 | 95 | if e.errno != 2: |
|
96 | 96 | raise |
|
97 | 97 | |
|
98 | 98 | def hset(self, hashroot, key, value): |
|
99 | 99 | """ hashed set """ |
|
100 | 100 | hroot = self.root / hashroot |
|
101 | 101 | if not hroot.isdir(): |
|
102 | 102 | hroot.makedirs() |
|
103 | 103 | hfile = hroot / gethashfile(key) |
|
104 | 104 | d = self.get(hfile, {}) |
|
105 | 105 | d.update( {key : value}) |
|
106 | 106 | self[hfile] = d |
|
107 | 107 | |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def hget(self, hashroot, key, default = _sentinel, fast_only = True): |
|
111 | 111 | """ hashed get """ |
|
112 | 112 | hroot = self.root / hashroot |
|
113 | 113 | hfile = hroot / gethashfile(key) |
|
114 | 114 | |
|
115 | 115 | d = self.get(hfile, _sentinel ) |
|
116 | 116 | #print "got dict",d,"from",hfile |
|
117 | 117 | if d is _sentinel: |
|
118 | 118 | if fast_only: |
|
119 | 119 | if default is _sentinel: |
|
120 | 120 | raise KeyError(key) |
|
121 | 121 | |
|
122 | 122 | return default |
|
123 | 123 | |
|
124 | 124 | # slow mode ok, works even after hcompress() |
|
125 | 125 | d = self.hdict(hashroot) |
|
126 | 126 | |
|
127 | 127 | return d.get(key, default) |
|
128 | 128 | |
|
129 | 129 | def hdict(self, hashroot): |
|
130 | 130 | """ Get all data contained in hashed category 'hashroot' as dict """ |
|
131 | 131 | hfiles = self.keys(hashroot + "/*") |
|
132 | 132 | hfiles.sort() |
|
133 | 133 | last = len(hfiles) and hfiles[-1] or '' |
|
134 | 134 | if last.endswith('xx'): |
|
135 | 135 | # print "using xx" |
|
136 | 136 | hfiles = [last] + hfiles[:-1] |
|
137 | 137 | |
|
138 | 138 | all = {} |
|
139 | 139 | |
|
140 | 140 | for f in hfiles: |
|
141 | 141 | # print "using",f |
|
142 | 142 | try: |
|
143 | 143 | all.update(self[f]) |
|
144 | 144 | except KeyError: |
|
145 | 145 | print("Corrupt",f,"deleted - hset is not threadsafe!") |
|
146 | 146 | del self[f] |
|
147 | 147 | |
|
148 | 148 | self.uncache(f) |
|
149 | 149 | |
|
150 | 150 | return all |
|
151 | 151 | |
|
152 | 152 | def hcompress(self, hashroot): |
|
153 | 153 | """ Compress category 'hashroot', so hset is fast again |
|
154 | 154 | |
|
155 | 155 | hget will fail if fast_only is True for compressed items (that were |
|
156 | 156 | hset before hcompress). |
|
157 | 157 | |
|
158 | 158 | """ |
|
159 | 159 | hfiles = self.keys(hashroot + "/*") |
|
160 | 160 | all = {} |
|
161 | 161 | for f in hfiles: |
|
162 | 162 | # print "using",f |
|
163 | 163 | all.update(self[f]) |
|
164 | 164 | self.uncache(f) |
|
165 | 165 | |
|
166 | 166 | self[hashroot + '/xx'] = all |
|
167 | 167 | for f in hfiles: |
|
168 | 168 | p = self.root / f |
|
169 | 169 | if p.basename() == 'xx': |
|
170 | 170 | continue |
|
171 | 171 | p.remove() |
|
172 | 172 | |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | def __delitem__(self,key): |
|
176 | 176 | """ del db["key"] """ |
|
177 | 177 | fil = self.root / key |
|
178 | 178 | self.cache.pop(fil,None) |
|
179 | 179 | try: |
|
180 | 180 | fil.remove() |
|
181 | 181 | except OSError: |
|
182 | 182 | # notfound and permission denied are ok - we |
|
183 | 183 | # lost, the other process wins the conflict |
|
184 | 184 | pass |
|
185 | 185 | |
|
186 | 186 | def _normalized(self, p): |
|
187 | 187 | """ Make a key suitable for user's eyes """ |
|
188 | 188 | return str(self.root.relpathto(p)).replace('\\','/') |
|
189 | 189 | |
|
190 | 190 | def keys(self, globpat = None): |
|
191 | 191 | """ All keys in DB, or all keys matching a glob""" |
|
192 | 192 | |
|
193 | 193 | if globpat is None: |
|
194 | 194 | files = self.root.walkfiles() |
|
195 | 195 | else: |
|
196 | 196 | files = [Path(p) for p in glob.glob(self.root/globpat)] |
|
197 | 197 | return [self._normalized(p) for p in files if p.isfile()] |
|
198 | 198 | |
|
199 | 199 | def __iter__(self): |
|
200 | 200 | return iter(self.keys()) |
|
201 | 201 | |
|
202 | 202 | def __len__(self): |
|
203 | 203 | return len(self.keys()) |
|
204 | 204 | |
|
205 | 205 | def uncache(self,*items): |
|
206 | 206 | """ Removes all, or specified items from cache |
|
207 | 207 | |
|
208 | 208 | Use this after reading a large amount of large objects |
|
209 | 209 | to free up memory, when you won't be needing the objects |
|
210 | 210 | for a while. |
|
211 | 211 | |
|
212 | 212 | """ |
|
213 | 213 | if not items: |
|
214 | 214 | self.cache = {} |
|
215 | 215 | for it in items: |
|
216 | 216 | self.cache.pop(it,None) |
|
217 | 217 | |
|
218 | 218 | def waitget(self,key, maxwaittime = 60 ): |
|
219 | 219 | """ Wait (poll) for a key to get a value |
|
220 | 220 | |
|
221 | 221 | Will wait for `maxwaittime` seconds before raising a KeyError. |
|
222 | 222 | The call exits normally if the `key` field in db gets a value |
|
223 | 223 | within the timeout period. |
|
224 | 224 | |
|
225 | 225 | Use this for synchronizing different processes or for ensuring |
|
226 | 226 | that an unfortunately timed "db['key'] = newvalue" operation |
|
227 | 227 | in another process (which causes all 'get' operations to raise a
|
228 | 228 | KeyError for the duration of pickling) won't screw up your program |
|
229 | 229 | logic. |
|
230 | 230 | """ |
|
231 | 231 | |
|
232 | 232 | wtimes = [0.2] * 3 + [0.5] * 2 + [1] |
|
233 | 233 | tries = 0 |
|
234 | 234 | waited = 0 |
|
235 | 235 | while 1: |
|
236 | 236 | try: |
|
237 | 237 | val = self[key] |
|
238 | 238 | return val |
|
239 | 239 | except KeyError: |
|
240 | 240 | pass |
|
241 | 241 | |
|
242 | 242 | if waited > maxwaittime: |
|
243 | 243 | raise KeyError(key) |
|
244 | 244 | |
|
245 | 245 | time.sleep(wtimes[tries]) |
|
246 | 246 | waited+=wtimes[tries] |
|
247 | 247 | if tries < len(wtimes) -1: |
|
248 | 248 | tries+=1 |
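
A standalone sketch of the poll-with-backoff loop waitget() implements: retry on KeyError, sleep according to a fixed schedule, and give up once a deadline has passed.

    import time

    def poll(fetch, schedule=(0.2, 0.2, 0.2, 0.5, 0.5, 1), maxwait=60):
        waited, i = 0, 0
        while True:
            try:
                return fetch()
            except KeyError:
                if waited > maxwait:
                    raise
            time.sleep(schedule[i])
            waited += schedule[i]
            i = min(i + 1, len(schedule) - 1)   # stay on the last interval

    print(poll(lambda: {'answer': 42}['answer']))   # returns immediately: 42
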
|
249 | 249 | |
|
250 | 250 | def getlink(self,folder): |
|
251 | 251 | """ Get a convenient link for accessing items """ |
|
252 | 252 | return PickleShareLink(self, folder) |
|
253 | 253 | |
|
254 | 254 | def __repr__(self): |
|
255 | 255 | return "PickleShareDB('%s')" % self.root |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | |
|
259 | 259 | class PickleShareLink: |
|
260 | 260 | """ A shortdand for accessing nested PickleShare data conveniently. |
|
261 | 261 | |
|
262 | 262 | Created through PickleShareDB.getlink(), example:: |
|
263 | 263 | |
|
264 | 264 | lnk = db.getlink('myobjects/test') |
|
265 | 265 | lnk.foo = 2 |
|
266 | 266 | lnk.bar = lnk.foo + 5 |
|
267 | 267 | |
|
268 | 268 | """ |
|
269 | 269 | def __init__(self, db, keydir ): |
|
270 | 270 | self.__dict__.update(locals()) |
|
271 | 271 | |
|
272 | 272 | def __getattr__(self,key): |
|
273 | 273 | return self.__dict__['db'][self.__dict__['keydir']+'/' + key] |
|
274 | 274 | def __setattr__(self,key,val): |
|
275 | 275 | self.db[self.keydir+'/' + key] = val |
|
276 | 276 | def __repr__(self): |
|
277 | 277 | db = self.__dict__['db'] |
|
278 | 278 | keys = db.keys( self.__dict__['keydir'] +"/*") |
|
279 | 279 | return "<PickleShareLink '%s': %s>" % ( |
|
280 | 280 | self.__dict__['keydir'], |
|
281 | 281 | ";".join([Path(k).basename() for k in keys])) |
|
282 | 282 | |
|
283 | 283 | def main(): |
|
284 | 284 | import textwrap |
|
285 | 285 | usage = textwrap.dedent("""\ |
|
286 | 286 | pickleshare - manage PickleShare databases |
|
287 | 287 | |
|
288 | 288 | Usage: |
|
289 | 289 | |
|
290 | 290 | pickleshare dump /path/to/db > dump.txt |
|
291 | 291 | pickleshare load /path/to/db < dump.txt |
|
292 | 292 | pickleshare test /path/to/db |
|
293 | 293 | """) |
|
294 | 294 | DB = PickleShareDB |
|
295 | 295 | import sys |
|
296 | 296 | if len(sys.argv) < 2: |
|
297 | 297 | print(usage) |
|
298 | 298 | return |
|
299 | 299 | |
|
300 | 300 | cmd = sys.argv[1] |
|
301 | 301 | args = sys.argv[2:] |
|
302 | 302 | if cmd == 'dump': |
|
303 | 303 | if not args: args= ['.'] |
|
304 | 304 | db = DB(args[0]) |
|
305 | 305 | import pprint |
|
306 | 306 | pprint.pprint(db.items()) |
|
307 | 307 | elif cmd == 'load': |
|
308 | 308 | cont = sys.stdin.read() |
|
309 | 309 | db = DB(args[0]) |
|
310 | 310 | data = eval(cont) |
|
311 | 311 | db.clear() |
|
312 | 312 | for k,v in data.items():
|
313 | 313 | db[k] = v |
|
314 | 314 | elif cmd == 'testwait': |
|
315 | 315 | db = DB(args[0]) |
|
316 | 316 | db.clear() |
|
317 | 317 | print(db.waitget('250')) |
|
318 | 318 | elif cmd == 'test': |
|
319 | 319 | test() |
|
320 | 320 | stress() |
|
321 | 321 | |
|
322 | 322 | if __name__== "__main__": |
|
323 | 323 | main() |
|
324 | 324 | |
|
325 | 325 |
@@ -1,124 +1,123 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | Utilities for working with external processes. |
|
4 | 4 | """ |
|
5 | 5 | |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | # Copyright (C) 2008-2011 The IPython Development Team |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | 10 | # the file COPYING, distributed as part of this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | from __future__ import print_function |
|
17 | 17 | |
|
18 | 18 | # Stdlib |
|
19 | 19 | import os |
|
20 | 20 | import sys |
|
21 | import shlex | |
|
22 | 21 | |
|
23 | 22 | # Our own |
|
24 | 23 | if sys.platform == 'win32': |
|
25 | 24 | from ._process_win32 import _find_cmd, system, getoutput, arg_split |
|
26 | 25 | elif sys.platform == 'cli': |
|
27 | 26 | from ._process_cli import _find_cmd, system, getoutput, arg_split |
|
28 | 27 | else: |
|
29 | 28 | from ._process_posix import _find_cmd, system, getoutput, arg_split |
|
30 | 29 | |
|
31 | 30 | from ._process_common import getoutputerror, get_output_error_code, process_handler |
|
32 | 31 | from . import py3compat |
|
33 | 32 | |
|
34 | 33 | #----------------------------------------------------------------------------- |
|
35 | 34 | # Code |
|
36 | 35 | #----------------------------------------------------------------------------- |
|
37 | 36 | |
|
38 | 37 | |
|
39 | 38 | class FindCmdError(Exception): |
|
40 | 39 | pass |
|
41 | 40 | |
|
42 | 41 | |
|
43 | 42 | def find_cmd(cmd): |
|
44 | 43 | """Find absolute path to executable cmd in a cross platform manner. |
|
45 | 44 | |
|
46 | 45 | This function tries to determine the full path to a command line program |
|
47 | 46 | using `which` on Unix/Linux/OS X and `win32api` on Windows. Most of the |
|
48 | 47 | time it will use the version that is first on the user's `PATH`.
|
49 | 48 | |
|
50 | 49 | Warning, don't use this to find IPython command line programs as there |
|
51 | 50 | is a risk you will find the wrong one. Instead find those using the |
|
52 | 51 | following code and looking for the application itself:: |
|
53 | 52 | |
|
54 | 53 | from IPython.utils.path import get_ipython_module_path |
|
55 | 54 | from IPython.utils.process import pycmd2argv |
|
56 | 55 | argv = pycmd2argv(get_ipython_module_path('IPython.terminal.ipapp')) |
|
57 | 56 | |
|
58 | 57 | Parameters |
|
59 | 58 | ---------- |
|
60 | 59 | cmd : str |
|
61 | 60 | The command line program to look for. |
|
62 | 61 | """ |
|
63 | 62 | try: |
|
64 | 63 | path = _find_cmd(cmd).rstrip() |
|
65 | 64 | except OSError: |
|
66 | 65 | raise FindCmdError('command could not be found: %s' % cmd) |
|
67 | 66 | # which returns empty if not found |
|
68 | 67 | if path == '': |
|
69 | 68 | raise FindCmdError('command could not be found: %s' % cmd) |
|
70 | 69 | return os.path.abspath(path) |
|
71 | 70 | |
|
72 | 71 | |
|
73 | 72 | def is_cmd_found(cmd): |
|
74 | 73 | """Check whether executable `cmd` exists or not and return a bool.""" |
|
75 | 74 | try: |
|
76 | 75 | find_cmd(cmd) |
|
77 | 76 | return True |
|
78 | 77 | except FindCmdError: |
|
79 | 78 | return False |
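
A usage sketch for the two helpers above ('git' is only an example command):

    from IPython.utils.process import find_cmd, is_cmd_found

    if is_cmd_found('git'):
        print(find_cmd('git'))   # absolute path of the first 'git' on PATH
    else:
        print('git is not installed')
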
|
80 | 79 | |
|
81 | 80 | |
|
82 | 81 | def pycmd2argv(cmd): |
|
83 | 82 | r"""Take the path of a python command and return a list (argv-style). |
|
84 | 83 | |
|
85 | 84 | This only works on Python based command line programs and will find the |
|
86 | 85 | location of the ``python`` executable using ``sys.executable`` to make |
|
87 | 86 | sure the right version is used. |
|
88 | 87 | |
|
89 | 88 | For a given path ``cmd``, this returns [cmd] if cmd's extension is .exe, |
|
90 | 89 | .com or .bat, and [sys.executable, cmd] otherwise.
|
91 | 90 | |
|
92 | 91 | Parameters |
|
93 | 92 | ---------- |
|
94 | 93 | cmd : string |
|
95 | 94 | The path of the command. |
|
96 | 95 | |
|
97 | 96 | Returns |
|
98 | 97 | ------- |
|
99 | 98 | argv-style list. |
|
100 | 99 | """ |
|
101 | 100 | ext = os.path.splitext(cmd)[1] |
|
102 | 101 | if ext in ['.exe', '.com', '.bat']: |
|
103 | 102 | return [cmd] |
|
104 | 103 | else: |
|
105 | 104 | return [sys.executable, cmd] |
|
106 | 105 | |
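
A quick sketch of what pycmd2argv returns (the paths are illustrative)::

    from IPython.utils.process import pycmd2argv

    pycmd2argv('C:\\Tools\\foo.exe')    # -> ['C:\\Tools\\foo.exe']
    pycmd2argv('/opt/scripts/foo.py')   # -> [sys.executable, '/opt/scripts/foo.py']
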
|
107 | 106 | |
|
108 | 107 | def abbrev_cwd(): |
|
109 | 108 | """ Return abbreviated version of cwd, e.g. d:mydir """ |
|
110 | 109 | cwd = py3compat.getcwd().replace('\\','/') |
|
111 | 110 | drivepart = '' |
|
112 | 111 | tail = cwd |
|
113 | 112 | if sys.platform == 'win32': |
|
114 | 113 | if len(cwd) < 4: |
|
115 | 114 | return cwd |
|
116 | 115 | drivepart,tail = os.path.splitdrive(cwd) |
|
117 | 116 | |
|
118 | 117 | |
|
119 | 118 | parts = tail.split('/') |
|
120 | 119 | if len(parts) > 2: |
|
121 | 120 | tail = '/'.join(parts[-2:]) |
|
122 | 121 | |
|
123 | 122 | return (drivepart + ( |
|
124 | 123 | cwd == '/' and '/' or tail)) |
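
A worked example of abbrev_cwd's truncation (the directory is illustrative): with a working directory of /home/user/projects/ipython/docs, splitting on '/' yields more than two parts, so only the last two are kept::

    >>> abbrev_cwd()      # cwd is /home/user/projects/ipython/docs
    'ipython/docs'

On Windows the drive prefix from os.path.splitdrive is prepended, which is where the d:mydir form in the docstring comes from.
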
@@ -1,134 +1,133 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Script to commit the doc build outputs into the github-pages repo. |
|
3 | 3 | |
|
4 | 4 | Use: |
|
5 | 5 | |
|
6 | 6 | gh-pages.py [tag] |
|
7 | 7 | |
|
8 | 8 | If no tag is given, the current output of 'git describe' is used. If given, |
|
9 | 9 | that is how the resulting directory will be named. |
|
10 | 10 | |
|
11 | 11 | In practice, you should use either actual clean tags from a current build or |
|
12 | 12 | something like 'current' as a stable URL for the most current version of the docs."""
|
13 | 13 | |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | # Imports |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | import os |
|
18 | import re | |
|
19 | 18 | import shutil |
|
20 | 19 | import sys |
|
21 | 20 | from os import chdir as cd |
|
22 | 21 | from os.path import join as pjoin |
|
23 | 22 | |
|
24 | 23 | from subprocess import Popen, PIPE, CalledProcessError, check_call |
|
25 | 24 | |
|
26 | 25 | #----------------------------------------------------------------------------- |
|
27 | 26 | # Globals |
|
28 | 27 | #----------------------------------------------------------------------------- |
|
29 | 28 | |
|
30 | 29 | pages_dir = 'gh-pages' |
|
31 | 30 | html_dir = 'build/html' |
|
32 | 31 | pdf_dir = 'build/latex' |
|
33 | 32 | pages_repo = 'git@github.com:ipython/ipython-doc.git' |
|
34 | 33 | |
|
35 | 34 | #----------------------------------------------------------------------------- |
|
36 | 35 | # Functions |
|
37 | 36 | #----------------------------------------------------------------------------- |
|
38 | 37 | def sh(cmd): |
|
39 | 38 | """Execute command in a subshell, return status code.""" |
|
40 | 39 | return check_call(cmd, shell=True) |
|
41 | 40 | |
|
42 | 41 | |
|
43 | 42 | def sh2(cmd): |
|
44 | 43 | """Execute command in a subshell, return stdout. |
|
45 | 44 | |
|
46 | 45 | Stderr is unbuffered from the subshell."""
|
47 | 46 | p = Popen(cmd, stdout=PIPE, shell=True) |
|
48 | 47 | out = p.communicate()[0] |
|
49 | 48 | retcode = p.returncode |
|
50 | 49 | if retcode: |
|
51 | 50 | raise CalledProcessError(retcode, cmd) |
|
52 | 51 | else: |
|
53 | 52 | return out.rstrip() |
|
54 | 53 | |
|
55 | 54 | |
|
56 | 55 | def sh3(cmd): |
|
57 | 56 | """Execute command in a subshell, return stdout, stderr |
|
58 | 57 | |
|
59 | 58 | Both streams are captured and returned; raises CalledProcessError on failure."""
|
60 | 59 | p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) |
|
61 | 60 | out, err = p.communicate() |
|
62 | 61 | retcode = p.returncode |
|
63 | 62 | if retcode: |
|
64 | 63 | raise CalledProcessError(retcode, cmd) |
|
65 | 64 | else: |
|
66 | 65 | return out.rstrip(), err.rstrip() |
|
67 | 66 | |
|
68 | 67 | |
|
69 | 68 | def init_repo(path): |
|
70 | 69 | """clone the gh-pages repo if we haven't already.""" |
|
71 | 70 | sh("git clone %s %s"%(pages_repo, path)) |
|
72 | 71 | here = os.getcwdu() |
|
73 | 72 | cd(path) |
|
74 | 73 | sh('git checkout gh-pages') |
|
75 | 74 | cd(here) |
|
76 | 75 | |
|
77 | 76 | #----------------------------------------------------------------------------- |
|
78 | 77 | # Script starts |
|
79 | 78 | #----------------------------------------------------------------------------- |
|
80 | 79 | if __name__ == '__main__': |
|
81 | 80 | # The tag can be given as a positional argument |
|
82 | 81 | try: |
|
83 | 82 | tag = sys.argv[1] |
|
84 | 83 | except IndexError: |
|
85 | 84 | tag = "dev" |
|
86 | 85 | |
|
87 | 86 | startdir = os.getcwdu() |
|
88 | 87 | if not os.path.exists(pages_dir): |
|
89 | 88 | # init the repo |
|
90 | 89 | init_repo(pages_dir) |
|
91 | 90 | else: |
|
92 | 91 | # ensure up-to-date before operating |
|
93 | 92 | cd(pages_dir) |
|
94 | 93 | sh('git checkout gh-pages') |
|
95 | 94 | sh('git pull') |
|
96 | 95 | cd(startdir) |
|
97 | 96 | |
|
98 | 97 | dest = pjoin(pages_dir, tag) |
|
99 | 98 | |
|
100 | 99 | # don't `make html` here, because gh-pages already depends on html in Makefile |
|
101 | 100 | # sh('make html') |
|
102 | 101 | if tag != 'dev': |
|
103 | 102 | # only build pdf for non-dev targets |
|
104 | 103 | #sh2('make pdf') |
|
105 | 104 | pass |
|
106 | 105 | |
|
107 | 106 | # This is pretty unforgiving: we unconditionally nuke the destination |
|
108 | 107 | # directory, and then copy the html tree in there |
|
109 | 108 | shutil.rmtree(dest, ignore_errors=True) |
|
110 | 109 | shutil.copytree(html_dir, dest) |
|
111 | 110 | if tag != 'dev': |
|
112 | 111 | #shutil.copy(pjoin(pdf_dir, 'ipython.pdf'), pjoin(dest, 'ipython.pdf')) |
|
113 | 112 | pass |
|
114 | 113 | |
|
115 | 114 | try: |
|
116 | 115 | cd(pages_dir) |
|
117 | 116 | branch = sh2('git rev-parse --abbrev-ref HEAD').strip() |
|
118 | 117 | if branch != 'gh-pages': |
|
119 | 118 | e = 'On %r, git branch is %r, MUST be "gh-pages"' % (pages_dir, |
|
120 | 119 | branch) |
|
121 | 120 | raise RuntimeError(e) |
|
122 | 121 | |
|
123 | 122 | sh('git add -A %s' % tag) |
|
124 | 123 | sh('git commit -m"Updated doc release: %s"' % tag) |
|
125 | 124 | |
|
126 | 125 | print 'Most recent 3 commits:' |
|
127 | 126 | sys.stdout.flush() |
|
128 | 127 | sh('git --no-pager log --oneline HEAD~3..') |
|
129 | 128 | finally: |
|
130 | 129 | cd(startdir) |
|
131 | 130 | |
|
132 | 131 | |
|
133 | 132 | print 'Now verify the build in: %r' % dest |
|
134 | 133 | print "If everything looks good, 'git push'" |
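
The branch guard above relies on sh2 capturing stdout; a standalone sketch of the same check using only the standard library (the helper name current_branch is illustrative, not part of this script)::

    from subprocess import check_output

    def current_branch():
        # equivalent to sh2('git rev-parse --abbrev-ref HEAD')
        return check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip()

    if current_branch() != 'gh-pages':
        raise RuntimeError('docs must be committed from the gh-pages branch')
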
@@ -1,99 +1,99 b'' | |||
|
1 | 1 | """A simple example of how to use IPython.config.application.Application. |
|
2 | 2 | |
|
3 | 3 | This should serve as a simple example that shows how the IPython config |
|
4 | 4 | system works. The main classes are: |
|
5 | 5 | |
|
6 | 6 | * IPython.config.configurable.Configurable |
|
7 | 7 | * IPython.config.configurable.SingletonConfigurable |
|
8 | 8 | * IPython.config.loader.Config |
|
9 | 9 | * IPython.config.application.Application |
|
10 | 10 | |
|
11 | 11 | To see the command line option help, run this program from the command line:: |
|
12 | 12 | |
|
13 | 13 | $ python appconfig.py -h |
|
14 | 14 | |
|
15 | 15 | To make one of your classes configurable (from the command line and config |
|
16 | 16 | files) inherit from Configurable and declare class attributes as traits (see |
|
17 | 17 | classes Foo and Bar below). To make the traits configurable, you will need |
|
18 | 18 | to set the following options: |
|
19 | 19 | |
|
20 | 20 | * ``config``: set to ``True`` to make the attribute configurable. |
|
21 | 21 | * ``shortname``: by default, configurable attributes are set using the syntax |
|
22 | 22 | "Classname.attributename". At the command line, this is a bit verbose, so |
|
23 | 23 | we allow "shortnames" to be declared. Setting a shortname is optional, but |
|
24 | 24 | when you do this, you can set the option at the command line using the |
|
25 | 25 | syntax: "shortname=value". |
|
26 | 26 | * ``help``: set the help string to display a help message when the ``-h`` |
|
27 | 27 | option is given at the command line. The help string should be valid ReST. |
|
28 | 28 | |
|
29 | 29 | When the config attribute of an Application is updated, it will fire all of |
|
30 | 30 | the trait's events for all of the config=True attributes. |
|
31 | 31 | """ |
|
32 | 32 | |
|
33 | 33 | from IPython.config.configurable import Configurable |
|
34 | 34 | from IPython.config.application import Application |
|
35 | 35 | from IPython.utils.traitlets import ( |
|
36 | Bool, Unicode, Int, |

36 | Bool, Unicode, Int, List, Dict |
|
37 | 37 | ) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | class Foo(Configurable): |
|
41 | 41 | """A class that has configurable, typed attributes. |
|
42 | 42 | |
|
43 | 43 | """ |
|
44 | 44 | |
|
45 | 45 | i = Int(0, config=True, help="The integer i.") |
|
46 | 46 | j = Int(1, config=True, help="The integer j.") |
|
47 | 47 | name = Unicode(u'Brian', config=True, help="First name.") |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class Bar(Configurable): |
|
51 | 51 | |
|
52 | 52 | enabled = Bool(True, config=True, help="Enable bar.") |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | class MyApp(Application): |
|
56 | 56 | |
|
57 | 57 | name = Unicode(u'myapp') |
|
58 | 58 | running = Bool(False, config=True, |
|
59 | 59 | help="Is the app running?") |
|
60 | 60 | classes = List([Bar, Foo]) |
|
61 | 61 | config_file = Unicode(u'', config=True, |
|
62 | 62 | help="Load this config file") |
|
63 | 63 | |
|
64 | 64 | aliases = Dict(dict(i='Foo.i',j='Foo.j',name='Foo.name', running='MyApp.running', |
|
65 | 65 | enabled='Bar.enabled', log_level='MyApp.log_level')) |
|
66 | 66 | |
|
67 | 67 | flags = Dict(dict(enable=({'Bar': {'enabled' : True}}, "Enable Bar"), |
|
68 | 68 | disable=({'Bar': {'enabled' : False}}, "Disable Bar"), |
|
69 | 69 | debug=({'MyApp':{'log_level':10}}, "Set loglevel to DEBUG") |
|
70 | 70 | )) |
|
71 | 71 | |
|
72 | 72 | def init_foo(self): |
|
73 | 73 | # Pass config to other classes for them to inherit the config. |
|
74 | 74 | self.foo = Foo(config=self.config) |
|
75 | 75 | |
|
76 | 76 | def init_bar(self): |
|
77 | 77 | # Pass config to other classes for them to inherit the config. |
|
78 | 78 | self.bar = Bar(config=self.config) |
|
79 | 79 | |
|
80 | 80 | def initialize(self, argv=None): |
|
81 | 81 | self.parse_command_line(argv) |
|
82 | 82 | if self.config_file: |
|
83 | 83 | self.load_config_file(self.config_file) |
|
84 | 84 | self.init_foo() |
|
85 | 85 | self.init_bar() |
|
86 | 86 | |
|
87 | 87 | def start(self): |
|
88 | 88 | print("app.config:") |
|
89 | 89 | print(self.config) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def main(): |
|
93 | 93 | app = MyApp() |
|
94 | 94 | app.initialize() |
|
95 | 95 | app.start() |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | if __name__ == "__main__": |
|
99 | 99 | main() |
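
A minimal sketch of driving MyApp programmatically instead of from a shell; the argument values are illustrative, and the aliases and flags defined above translate them into trait assignments::

    app = MyApp()
    app.initialize(['--i=10', '--name=Carol', '--disable', '--debug'])
    app.start()     # prints the merged config, including Bar.enabled=False
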
@@ -1,177 +1,177 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | from __future__ import print_function |
|
3 | 3 | |
|
4 | 4 | __docformat__ = "restructuredtext en" |
|
5 | 5 | |
|
6 | 6 | #------------------------------------------------------------------------------- |
|
7 | 7 | # Copyright (C) 2008 The IPython Development Team |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | 10 | # the file COPYING, distributed as part of this software. |
|
11 | 11 | #------------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #------------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #------------------------------------------------------------------------------- |
|
16 | 16 | |
|
17 | import sys |

17 | import sys |
|
18 | 18 | from textwrap import fill |
|
19 | 19 | |
|
20 | 20 | display_status=True |
|
21 | 21 | |
|
22 | 22 | def check_display(f): |
|
23 | 23 | """decorator to allow display methods to be muted by mod.display_status""" |
|
24 | 24 | def maybe_display(*args, **kwargs): |
|
25 | 25 | if display_status: |
|
26 | 26 | return f(*args, **kwargs) |
|
27 | 27 | return maybe_display |
|
28 | 28 | |
|
29 | 29 | @check_display |
|
30 | 30 | def print_line(char='='): |
|
31 | 31 | print(char * 76) |
|
32 | 32 | |
|
33 | 33 | @check_display |
|
34 | 34 | def print_status(package, status): |
|
35 | 35 | initial_indent = "%22s: " % package |
|
36 | 36 | indent = ' ' * 24 |
|
37 | 37 | print(fill(str(status), width=76, |
|
38 | 38 | initial_indent=initial_indent, |
|
39 | 39 | subsequent_indent=indent)) |
|
40 | 40 | |
|
41 | 41 | @check_display |
|
42 | 42 | def print_message(message): |
|
43 | 43 | indent = ' ' * 24 + "* " |
|
44 | 44 | print(fill(str(message), width=76, |
|
45 | 45 | initial_indent=indent, |
|
46 | 46 | subsequent_indent=indent)) |
|
47 | 47 | |
|
48 | 48 | @check_display |
|
49 | 49 | def print_raw(section): |
|
50 | 50 | print(section) |
|
51 | 51 | |
|
52 | 52 | #------------------------------------------------------------------------------- |
|
53 | 53 | # Tests for specific packages |
|
54 | 54 | #------------------------------------------------------------------------------- |
|
55 | 55 | |
|
56 | 56 | def check_for_ipython(): |
|
57 | 57 | try: |
|
58 | 58 | import IPython |
|
59 | 59 | except ImportError: |
|
60 | 60 | print_status("IPython", "Not found") |
|
61 | 61 | return False |
|
62 | 62 | else: |
|
63 | 63 | print_status("IPython", IPython.__version__) |
|
64 | 64 | return True |
|
65 | 65 | |
|
66 | 66 | def check_for_sphinx(): |
|
67 | 67 | try: |
|
68 | 68 | import sphinx |
|
69 | 69 | except ImportError: |
|
70 | 70 | print_status('sphinx', "Not found (required for docs and nbconvert)") |
|
71 | 71 | return False |
|
72 | 72 | else: |
|
73 | 73 | print_status('sphinx', sphinx.__version__) |
|
74 | 74 | return True |
|
75 | 75 | |
|
76 | 76 | def check_for_pygments(): |
|
77 | 77 | try: |
|
78 | 78 | import pygments |
|
79 | 79 | except ImportError: |
|
80 | 80 | print_status('pygments', "Not found (required for docs and nbconvert)") |
|
81 | 81 | return False |
|
82 | 82 | else: |
|
83 | 83 | print_status('pygments', pygments.__version__) |
|
84 | 84 | return True |
|
85 | 85 | |
|
86 | 86 | def check_for_jinja2(): |
|
87 | 87 | try: |
|
88 | 88 | import jinja2 |
|
89 | 89 | except ImportError: |
|
90 | 90 | print_status('jinja2', "Not found (required for notebook and nbconvert)") |
|
91 | 91 | return False |
|
92 | 92 | else: |
|
93 | 93 | print_status('jinja2', jinja2.__version__) |
|
94 | 94 | return True |
|
95 | 95 | |
|
96 | 96 | def check_for_nose(): |
|
97 | 97 | try: |
|
98 | 98 | import nose |
|
99 | 99 | except ImportError: |
|
100 | 100 | print_status('nose', "Not found (required for running the test suite)") |
|
101 | 101 | return False |
|
102 | 102 | else: |
|
103 | 103 | print_status('nose', nose.__version__) |
|
104 | 104 | return True |
|
105 | 105 | |
|
106 | 106 | def check_for_pexpect(): |
|
107 | 107 | try: |
|
108 | 108 | import pexpect |
|
109 | 109 | except ImportError: |
|
110 | 110 | print_status("pexpect", "no (will use bundled version in IPython.external)") |
|
111 | 111 | return False |
|
112 | 112 | else: |
|
113 | 113 | print_status("pexpect", pexpect.__version__) |
|
114 | 114 | return True |
|
115 | 115 | |
|
116 | 116 | def check_for_pyzmq(): |
|
117 | 117 | try: |
|
118 | 118 | import zmq |
|
119 | 119 | except ImportError: |
|
120 | 120 | print_status('pyzmq', "no (required for qtconsole, notebook, and parallel computing capabilities)") |
|
121 | 121 | return False |
|
122 | 122 | else: |
|
123 | 123 | # pyzmq 2.1.10 adds the pyzmq_version_info function for returning
|
124 | 124 | # version as a tuple |
|
125 | 125 | if hasattr(zmq, 'pyzmq_version_info') and zmq.pyzmq_version_info() >= (2,1,11): |
|
126 | 126 | print_status("pyzmq", zmq.__version__) |
|
127 | 127 | return True |
|
128 | 128 | else: |
|
129 | 129 | print_status('pyzmq', "no (have %s, but require >= 2.1.11 for" |
|
130 | 130 | " qtconsole, notebook, and parallel computing capabilities)" % zmq.__version__) |
|
131 | 131 | return False |
|
132 | 132 | |
|
133 | 133 | def check_for_tornado(): |
|
134 | 134 | try: |
|
135 | 135 | import tornado |
|
136 | 136 | except ImportError: |
|
137 | 137 | print_status('tornado', "no (required for notebook)") |
|
138 | 138 | return False |
|
139 | 139 | else: |
|
140 | 140 | if getattr(tornado, 'version_info', (0,)) < (3,1): |
|
141 | 141 | print_status('tornado', "no (have %s, but require >= 3.1.0)" % tornado.version) |
|
142 | 142 | return False |
|
143 | 143 | else: |
|
144 | 144 | print_status('tornado', tornado.version) |
|
145 | 145 | return True |
|
146 | 146 | |
|
147 | 147 | def check_for_readline(): |
|
148 | 148 | from distutils.version import LooseVersion |
|
149 | 149 | readline = None |
|
150 | 150 | try: |
|
151 | 151 | import gnureadline as readline |
|
152 | 152 | except ImportError: |
|
153 | 153 | pass |
|
154 | 154 | if readline is None: |
|
155 | 155 | try: |
|
156 | 156 | import readline |
|
157 | 157 | except ImportError: |
|
158 | 158 | pass |
|
159 | 159 | if readline is None: |
|
160 | 160 | try: |
|
161 | 161 | import pyreadline |
|
162 | 162 | vs = pyreadline.release.version |
|
163 | 163 | except (ImportError, AttributeError): |
|
164 | 164 | print_status('readline', "no (required for good interactive behavior)") |
|
165 | 165 | return False |
|
166 | 166 | if LooseVersion(vs).version >= [1,7,1]: |
|
167 | 167 | print_status('readline', "yes pyreadline-" + vs) |
|
168 | 168 | return True |
|
169 | 169 | else: |
|
170 | 170 | print_status('readline', "no pyreadline-%s < 1.7.1" % vs) |
|
171 | 171 | return False |
|
172 | 172 | else: |
|
173 | 173 | if sys.platform == 'darwin' and 'libedit' in readline.__doc__: |
|
174 | 174 | print_status('readline', "no (libedit detected)") |
|
175 | 175 | return False |
|
176 | 176 | print_status('readline', "yes") |
|
177 | 177 | return True |
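
Because every print_* helper is wrapped by check_display, the whole report can be silenced by flipping the module-level flag; a hedged sketch, assuming this module is importable as setupext::

    import setupext

    setupext.display_status = False        # mute print_status / print_message / print_raw
    has_zmq = setupext.check_for_pyzmq()   # still returns True or False, just quietly
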
@@ -1,213 +1,211 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Simple tools to query github.com and gather stats about issues. |
|
3 | 3 | |
|
4 | 4 | To generate a report for IPython 2.0, run: |
|
5 | 5 | |
|
6 | 6 | python github_stats.py --milestone 2.0 --since-tag rel-1.0.0 |
|
7 | 7 | """ |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Imports |
|
10 | 10 | #----------------------------------------------------------------------------- |
|
11 | 11 | |
|
12 | 12 | from __future__ import print_function |
|
13 | 13 | |
|
14 | 14 | import codecs |
|
15 | import json | |
|
16 | import re | |
|
17 | 15 | import sys |
|
18 | 16 | |
|
19 | 17 | from argparse import ArgumentParser |
|
20 | 18 | from datetime import datetime, timedelta |
|
21 | 19 | from subprocess import check_output |
|
22 | 20 | from gh_api import ( |
|
23 | 21 | get_paged_request, make_auth_header, get_pull_request, is_pull_request, |
|
24 | 22 | get_milestone_id, get_issues_list, |
|
25 | 23 | ) |
|
26 | 24 | #----------------------------------------------------------------------------- |
|
27 | 25 | # Globals |
|
28 | 26 | #----------------------------------------------------------------------------- |
|
29 | 27 | |
|
30 | 28 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" |
|
31 | 29 | PER_PAGE = 100 |
|
32 | 30 | |
|
33 | 31 | #----------------------------------------------------------------------------- |
|
34 | 32 | # Functions |
|
35 | 33 | #----------------------------------------------------------------------------- |
|
36 | 34 | |
|
37 | 35 | def round_hour(dt): |
|
38 | 36 | return dt.replace(minute=0,second=0,microsecond=0) |
|
39 | 37 | |
|
40 | 38 | def _parse_datetime(s): |
|
41 | 39 | """Parse dates in the format returned by the Github API.""" |
|
42 | 40 | if s: |
|
43 | 41 | return datetime.strptime(s, ISO8601) |
|
44 | 42 | else: |
|
45 | 43 | return datetime.fromtimestamp(0) |
|
46 | 44 | |
|
47 | 45 | def issues2dict(issues): |
|
48 | 46 | """Convert a list of issues to a dict, keyed by issue number.""" |
|
49 | 47 | idict = {} |
|
50 | 48 | for i in issues: |
|
51 | 49 | idict[i['number']] = i |
|
52 | 50 | return idict |
|
53 | 51 | |
|
54 | 52 | def split_pulls(all_issues, project="ipython/ipython"): |
|
55 | 53 | """split a list of closed issues into non-PR Issues and Pull Requests""" |
|
56 | 54 | pulls = [] |
|
57 | 55 | issues = [] |
|
58 | 56 | for i in all_issues: |
|
59 | 57 | if is_pull_request(i): |
|
60 | 58 | pull = get_pull_request(project, i['number'], auth=True) |
|
61 | 59 | pulls.append(pull) |
|
62 | 60 | else: |
|
63 | 61 | issues.append(i) |
|
64 | 62 | return issues, pulls |
|
65 | 63 | |
|
66 | 64 | |
|
67 | 65 | def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False): |
|
68 | 66 | """Get all issues closed since a particular point in time. period |
|
69 | 67 | can either be a datetime object, or a timedelta object. In the |
|
70 | 68 | latter case, it is used as a time before the present. |
|
71 | 69 | """ |
|
72 | 70 | |
|
73 | 71 | which = 'pulls' if pulls else 'issues' |
|
74 | 72 | |
|
75 | 73 | if isinstance(period, timedelta): |
|
76 | 74 | since = round_hour(datetime.utcnow() - period) |
|
77 | 75 | else: |
|
78 | 76 | since = period |
|
79 | 77 | url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE) |
|
80 | 78 | allclosed = get_paged_request(url, headers=make_auth_header()) |
|
81 | 79 | |
|
82 | 80 | filtered = [ i for i in allclosed if _parse_datetime(i['closed_at']) > since ] |
|
83 | 81 | if pulls: |
|
84 | 82 | filtered = [ i for i in filtered if _parse_datetime(i['merged_at']) > since ] |
|
85 | 83 | # filter out PRs not against master (backports) |
|
86 | 84 | filtered = [ i for i in filtered if i['base']['ref'] == 'master' ] |
|
87 | 85 | else: |
|
88 | 86 | filtered = [ i for i in filtered if not is_pull_request(i) ] |
|
89 | 87 | |
|
90 | 88 | return filtered |
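
A hedged usage sketch of the query above (it needs network access and gh_api credentials; the time window and project are illustrative)::

    from datetime import timedelta

    issues = issues_closed_since(timedelta(days=30), project='ipython/ipython', pulls=False)
    pulls = issues_closed_since(timedelta(days=30), project='ipython/ipython', pulls=True)
    print('%d issues and %d merged PRs closed in the last 30 days' % (len(issues), len(pulls)))
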
|
91 | 89 | |
|
92 | 90 | |
|
93 | 91 | def sorted_by_field(issues, field='closed_at', reverse=False): |
|
94 | 92 | """Return a list of issues sorted by closing date."""
|
95 | 93 | return sorted(issues, key = lambda i:i[field], reverse=reverse) |
|
96 | 94 | |
|
97 | 95 | |
|
98 | 96 | def report(issues, show_urls=False): |
|
99 | 97 | """Summary report about a list of issues, printing number and title. |
|
100 | 98 | """ |
|
101 | 99 | # titles may have unicode in them, so we must encode everything below |
|
102 | 100 | if show_urls: |
|
103 | 101 | for i in issues: |
|
104 | 102 | role = 'ghpull' if 'merged_at' in i else 'ghissue' |
|
105 | 103 | print('* :%s:`%d`: %s' % (role, i['number'], |
|
106 | 104 | i['title'].encode('utf-8'))) |
|
107 | 105 | else: |
|
108 | 106 | for i in issues: |
|
109 | 107 | print('* %d: %s' % (i['number'], i['title'].encode('utf-8'))) |
|
110 | 108 | |
|
111 | 109 | #----------------------------------------------------------------------------- |
|
112 | 110 | # Main script |
|
113 | 111 | #----------------------------------------------------------------------------- |
|
114 | 112 | |
|
115 | 113 | if __name__ == "__main__": |
|
116 | 114 | # deal with unicode |
|
117 | 115 | sys.stdout = codecs.getwriter('utf8')(sys.stdout) |
|
118 | 116 | |
|
119 | 117 | # Whether to add reST urls for all issues in printout. |
|
120 | 118 | show_urls = True |
|
121 | 119 | |
|
122 | 120 | parser = ArgumentParser() |
|
123 | 121 | parser.add_argument('--since-tag', type=str, |
|
124 | 122 | help="The git tag to use for the starting point (typically the last major release)." |
|
125 | 123 | ) |
|
126 | 124 | parser.add_argument('--milestone', type=str, |
|
127 | 125 | help="The GitHub milestone to use for filtering issues [optional]." |
|
128 | 126 | ) |
|
129 | 127 | parser.add_argument('--days', type=int, |
|
130 | 128 | help="The number of days of data to summarize (use this or --since-tag)." |
|
131 | 129 | ) |
|
132 | 130 | parser.add_argument('--project', type=str, default="ipython/ipython", |
|
133 | 131 | help="The project to summarize." |
|
134 | 132 | ) |
|
135 | 133 | |
|
136 | 134 | opts = parser.parse_args() |
|
137 | 135 | tag = opts.since_tag |
|
138 | 136 | |
|
139 | 137 | # set `since` from days or git tag |
|
140 | 138 | if opts.days: |
|
141 | 139 | since = datetime.utcnow() - timedelta(days=opts.days) |
|
142 | 140 | else: |
|
143 | 141 | if not tag: |
|
144 | 142 | tag = check_output(['git', 'describe', '--abbrev=0']).strip() |
|
145 | 143 | cmd = ['git', 'log', '-1', '--format=%ai', tag] |
|
146 | 144 | tagday, tz = check_output(cmd).strip().rsplit(' ', 1) |
|
147 | 145 | since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S") |
|
148 | 146 | h = int(tz[1:3]) |
|
149 | 147 | m = int(tz[3:]) |
|
150 | 148 | td = timedelta(hours=h, minutes=m) |
|
151 | 149 | if tz[0] == '-': |
|
152 | 150 | since += td |
|
153 | 151 | else: |
|
154 | 152 | since -= td |
|
155 | 153 | |
|
156 | 154 | since = round_hour(since) |
|
157 | 155 | |
|
158 | 156 | milestone = opts.milestone |
|
159 | 157 | project = opts.project |
|
160 | 158 | |
|
161 | 159 | print("fetching GitHub stats since %s (tag: %s, milestone: %s)" % (since, tag, milestone), file=sys.stderr) |
|
162 | 160 | if milestone: |
|
163 | 161 | milestone_id = get_milestone_id(project=project, milestone=milestone, |
|
164 | 162 | auth=True) |
|
165 | 163 | issues = get_issues_list(project=project, |
|
166 | 164 | milestone=milestone_id, |
|
167 | 165 | state='closed', |
|
168 | 166 | auth=True, |
|
169 | 167 | ) |
|
170 | 168 | else: |
|
171 | 169 | issues = issues_closed_since(since, project=project, pulls=False) |
|
172 | 170 | pulls = issues_closed_since(since, project=project, pulls=True) |
|
173 | 171 | |
|
174 | 172 | # For regular reports, it's nice to show them in reverse chronological order |
|
175 | 173 | issues = sorted_by_field(issues, reverse=True) |
|
176 | 174 | pulls = sorted_by_field(pulls, reverse=True) |
|
177 | 175 | |
|
178 | 176 | n_issues, n_pulls = map(len, (issues, pulls)) |
|
179 | 177 | n_total = n_issues + n_pulls |
|
180 | 178 | |
|
181 | 179 | # Print summary report we can directly include into release notes. |
|
182 | 180 | |
|
183 | 181 | print() |
|
184 | 182 | since_day = since.strftime("%Y/%m/%d") |
|
185 | 183 | today = datetime.today().strftime("%Y/%m/%d") |
|
186 | 184 | print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag)) |
|
187 | 185 | print() |
|
188 | 186 | print("These lists are automatically generated, and may be incomplete or contain duplicates.") |
|
189 | 187 | print() |
|
190 | 188 | if tag: |
|
191 | 189 | # print git info, in addition to GitHub info: |
|
192 | 190 | since_tag = tag+'..' |
|
193 | 191 | cmd = ['git', 'log', '--oneline', since_tag] |
|
194 | 192 | ncommits = len(check_output(cmd).splitlines()) |
|
195 | 193 | |
|
196 | 194 | author_cmd = ['git', 'log', '--use-mailmap', "--format=* %aN", since_tag] |
|
197 | 195 | all_authors = check_output(author_cmd).decode('utf-8', 'replace').splitlines() |
|
198 | 196 | unique_authors = sorted(set(all_authors), key=lambda s: s.lower()) |
|
199 | 197 | print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits)) |
|
200 | 198 | print() |
|
201 | 199 | print('\n'.join(unique_authors)) |
|
202 | 200 | print() |
|
203 | 201 | |
|
204 | 202 | print() |
|
205 | 203 | print("We closed a total of %d issues, %d pull requests and %d regular issues;\n" |
|
206 | 204 | "this is the full list (generated with the script \n" |
|
207 | 205 | ":file:`tools/github_stats.py`):" % (n_total, n_pulls, n_issues)) |
|
208 | 206 | print() |
|
209 | 207 | print('Pull Requests (%d):\n' % n_pulls) |
|
210 | 208 | report(pulls, show_urls) |
|
211 | 209 | print() |
|
212 | 210 | print('Issues (%d):\n' % n_issues) |
|
213 | 211 | report(issues, show_urls) |
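
A worked example of the timezone handling in the __main__ block above (the timestamp and offset are illustrative)::

    from datetime import datetime, timedelta

    # tag commit reads '2013-08-08 12:00:00 +0200'
    tagday, tz = '2013-08-08 12:00:00', '+0200'
    since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
    td = timedelta(hours=int(tz[1:3]), minutes=int(tz[3:]))
    since = since + td if tz[0] == '-' else since - td
    # since is now 2013-08-08 10:00:00, i.e. the tag time expressed in UTC,
    # which round_hour then truncates to a clean hour boundary
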
@@ -1,25 +1,23 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Simple script to create a tarball with proper git info. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | import commands |
|
6 | 6 | import os |
|
7 | import sys | |
|
8 | import shutil | |
|
9 | 7 | |
|
10 | from |

8 | from toollib import cd, sh |
|
11 | 9 | |
|
12 | 10 | tag = commands.getoutput('git describe --tags') |
|
13 | 11 | base_name = 'ipython-%s' % tag |
|
14 | 12 | tar_name = '%s.tgz' % base_name |
|
15 | 13 | |
|
16 | 14 | # git archive is weird: Even if I give it a specific path, it still won't |
|
17 | 15 | # archive the whole tree. It seems the only way to get the whole tree is to cd |
|
18 | 16 | # to the top of the tree. There are long threads (since 2007) on the git list |
|
19 | 17 | # about this and it still doesn't work in a sensible way... |
|
20 | 18 | |
|
21 | 19 | start_dir = os.getcwdu() |
|
22 | 20 | cd('..') |
|
23 | 21 | git_tpl = 'git archive --format=tar --prefix={0}/ HEAD | gzip > {1}' |
|
24 | 22 | sh(git_tpl.format(base_name, tar_name)) |
|
25 | 23 | sh('mv {0} tools/'.format(tar_name)) |
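
With a tag of, say, rel-1.0.0 (illustrative), the git_tpl template above expands to::

    git archive --format=tar --prefix=ipython-rel-1.0.0/ HEAD | gzip > ipython-rel-1.0.0.tgz

and the final sh call then moves the resulting tarball into tools/.
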
@@ -1,79 +1,77 b'' | |||
|
1 | 1 | """Various utilities common to IPython release and maintenance tools. |
|
2 | 2 | """ |
|
3 | 3 | from __future__ import print_function |
|
4 | 4 | |
|
5 | 5 | # Library imports |
|
6 | 6 | import os |
|
7 | 7 | import sys |
|
8 | 8 | |
|
9 | from distutils.dir_util import remove_tree | |
|
10 | ||
|
11 | 9 | # Useful shorthands |
|
12 | 10 | pjoin = os.path.join |
|
13 | 11 | cd = os.chdir |
|
14 | 12 | |
|
15 | 13 | # Constants |
|
16 | 14 | |
|
17 | 15 | # SSH root address of the archive site |
|
18 | 16 | archive_user = 'ipython@archive.ipython.org' |
|
19 | 17 | archive_dir = 'archive.ipython.org' |
|
20 | 18 | archive = '%s:%s' % (archive_user, archive_dir) |
|
21 | 19 | |
|
22 | 20 | # Build commands |
|
23 | 21 | # Source dists |
|
24 | 22 | sdists = './setup.py sdist --formats=gztar,zip' |
|
25 | 23 | # Eggs |
|
26 | 24 | eggs = './setupegg.py bdist_egg' |
|
27 | 25 | |
|
28 | 26 | # Windows builds. |
|
29 | 27 | # We do them separately, so that the extra Windows scripts don't get pulled |
|
30 | 28 | # into Unix builds (setup.py has code which checks for bdist_wininst). Note |
|
31 | 29 | # that the install scripts args are added to the main distutils call in |
|
32 | 30 | # setup.py, so they don't need to be passed here. |
|
33 | 31 | # |
|
34 | 32 | # The Windows 64-bit installer can't be built by a Linux/Mac Python because of a
|
35 | 33 | # bug in distutils: http://bugs.python.org/issue6792. |
|
36 | 34 | # So we have to build it with a wine-installed native Windows Python... |
|
37 | 35 | win_builds = ["python setup.py bdist_wininst " |
|
38 | 36 | "--install-script=ipython_win_post_install.py", |
|
39 | 37 | r"%s/.wine/dosdevices/c\:/Python32/python.exe setup.py build " |
|
40 | 38 | "--plat-name=win-amd64 bdist_wininst " |
|
41 | 39 | "--install-script=ipython_win_post_install.py" % |
|
42 | 40 | os.environ['HOME'] ] |
|
43 | 41 | |
|
44 | 42 | # Utility functions |
|
45 | 43 | def sh(cmd): |
|
46 | 44 | """Run system command in shell, raise SystemExit if it returns an error.""" |
|
47 | 45 | print("$", cmd) |
|
48 | 46 | stat = os.system(cmd) |
|
49 | 47 | #stat = 0 # Uncomment this and comment previous to run in debug mode |
|
50 | 48 | if stat: |
|
51 | 49 | raise SystemExit("Command %s failed with code: %s" % (cmd, stat)) |
|
52 | 50 | |
|
53 | 51 | # Backwards compatibility |
|
54 | 52 | c = sh |
|
55 | 53 | |
|
56 | 54 | def get_ipdir(): |
|
57 | 55 | """Get IPython directory from command line, or assume it's the one above.""" |
|
58 | 56 | |
|
59 | 57 | # Initialize arguments and check location |
|
60 | 58 | try: |
|
61 | 59 | ipdir = sys.argv[1] |
|
62 | 60 | except IndexError: |
|
63 | 61 | ipdir = '..' |
|
64 | 62 | |
|
65 | 63 | ipdir = os.path.abspath(ipdir) |
|
66 | 64 | |
|
67 | 65 | cd(ipdir) |
|
68 | 66 | if not (os.path.isdir('IPython') and os.path.isfile('setup.py')):
|
69 | 67 | raise SystemExit('Invalid ipython directory: %s' % ipdir) |
|
70 | 68 | return ipdir |
|
71 | 69 | |
|
72 | 70 | |
|
73 | 71 | def compile_tree(): |
|
74 | 72 | """Compile all Python files below current directory.""" |
|
75 | 73 | stat = os.system('python -m compileall .') |
|
76 | 74 | if stat: |
|
77 | 75 | msg = '*** ERROR: Some Python files in tree do NOT compile! ***\n' |
|
78 | 76 | msg += 'See messages above for the actual file that produced it.\n' |
|
79 | 77 | raise SystemExit(msg) |
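
A sketch of how a release script might drive these helpers; the sequence is illustrative, not the actual release script::

    from toollib import get_ipdir, sh, sdists, compile_tree

    ipdir = get_ipdir()   # cd into the IPython source tree, or exit with an error
    compile_tree()        # byte-compile everything, abort on syntax errors
    sh(sdists)            # build the gztar and zip source distributions
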