Show More
The requested changes are too big and content was truncated. Show full diff
@@ -0,0 +1,26 b'' | |||
|
1 | name: Build docs | |
|
2 | ||
|
3 | on: [push, pull_request] | |
|
4 | ||
|
5 | jobs: | |
|
6 | build: | |
|
7 | runs-on: ubuntu-latest | |
|
8 | ||
|
9 | steps: | |
|
10 | - uses: actions/checkout@v2 | |
|
11 | - name: Set up Python 3.8 | |
|
12 | uses: actions/setup-python@v2 | |
|
13 | with: | |
|
14 | python-version: 3.8 | |
|
15 | - name: Install Graphviz | |
|
16 | run: | | |
|
17 | sudo apt-get update | |
|
18 | sudo apt-get install graphviz | |
|
19 | - name: Install Python dependencies | |
|
20 | run: | | |
|
21 | python -m pip install --upgrade pip setuptools | |
|
22 | pip install -r docs/requirements.txt | |
|
23 | - name: Build docs | |
|
24 | run: | | |
|
25 | python tools/fixup_whats_new_pr.py | |
|
26 | make -C docs/ html SPHINXOPTS="-W" |
@@ -0,0 +1,23 b'' | |||
|
1 | name: Run tests on OSX | |
|
2 | ||
|
3 | on: [push, pull_request] | |
|
4 | ||
|
5 | jobs: | |
|
6 | test: | |
|
7 | runs-on: macos-latest | |
|
8 | ||
|
9 | steps: | |
|
10 | - uses: actions/checkout@v2 | |
|
11 | - name: Set up Python 3.7 | |
|
12 | uses: actions/setup-python@v2 | |
|
13 | with: | |
|
14 | python-version: 3.7 | |
|
15 | - name: Install and update Python dependencies | |
|
16 | run: | | |
|
17 | python -m pip install --upgrade pip setuptools wheel | |
|
18 | python -m pip install --upgrade -e file://$PWD#egg=ipython[test] | |
|
19 | python -m pip install --upgrade --upgrade-strategy eager trio curio | |
|
20 | python -m pip install --upgrade pytest pytest-trio 'matplotlib!=3.2.0' | |
|
21 | python -m pip install --upgrade anyio | |
|
22 | - name: pytest | |
|
23 | run: pytest |
@@ -0,0 +1,36 b'' | |||
|
1 | name: Run tests | |
|
2 | ||
|
3 | on: [push, pull_request] | |
|
4 | ||
|
5 | jobs: | |
|
6 | test: | |
|
7 | runs-on: ubuntu-latest | |
|
8 | strategy: | |
|
9 | matrix: | |
|
10 | python-version: [3.7, 3.8, 3.9] | |
|
11 | ||
|
12 | steps: | |
|
13 | - uses: actions/checkout@v2 | |
|
14 | - name: Set up Python ${{ matrix.python-version }} | |
|
15 | uses: actions/setup-python@v2 | |
|
16 | with: | |
|
17 | python-version: ${{ matrix.python-version }} | |
|
18 | - name: Install and update Python dependencies | |
|
19 | run: | | |
|
20 | python -m pip install --upgrade pip setuptools wheel | |
|
21 | python -m pip install --upgrade -e file://$PWD#egg=ipython[test] | |
|
22 | python -m pip install --upgrade --upgrade-strategy eager trio curio | |
|
23 | python -m pip install --upgrade pytest pytest-trio 'matplotlib!=3.2.0' | |
|
24 | python -m pip install --upgrade check-manifest pytest-cov anyio | |
|
25 | - name: Check manifest | |
|
26 | run: check-manifest | |
|
27 | - name: iptest | |
|
28 | run: | | |
|
29 | cd /tmp && iptest --coverage xml && cd - | |
|
30 | cp /tmp/ipy_coverage.xml ./ | |
|
31 | cp /tmp/.coverage ./ | |
|
32 | - name: pytest | |
|
33 | run: | | |
|
34 | pytest | |
|
35 | - name: Upload coverage to Codecov | |
|
36 | uses: codecov/codecov-action@v1 |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
@@ -1,15 +1,21 b'' | |||
|
1 | 1 | # When making commits that are strictly formatting/style changes, add the |
|
2 | 2 | # commit hash here, so git blame can ignore the change. See docs for more |
|
3 | 3 | # details: |
|
4 | 4 | # https://git-scm.com/docs/git-config#Documentation/git-config.txt-blameignoreRevsFile |
|
5 | 5 | # |
|
6 | 6 | # |
|
7 | 7 | # You should be able to execute either |
|
8 | 8 | # ./tools/configure-git-blame-ignore-revs.bat or |
|
9 | 9 | # ./tools/configure-git-blame-ignore-revs.sh |
|
10 | 10 | # |
|
11 | 11 | # Example entries: |
|
12 | 12 | # |
|
13 | 13 | # <full commit hash> # initial black-format |
|
14 | 14 | # <full commit hash> # rename something internal |
|
15 | 15 | 6e748726282d1acb9a4f9f264ee679c474c4b8f5 # Apply pygrade --36plus on IPython/core/tests/test_inputtransformer.py. |
|
16 | 0233e65d8086d0ec34acb8685b7a5411633f0899 # apply pyupgrade to IPython/extensions/tests/test_autoreload.py | |
|
17 | a6a7e4dd7e51b892147895006d3a2a6c34b79ae6 # apply black to IPython/extensions/tests/test_autoreload.py | |
|
18 | c5ca5a8f25432dfd6b9eccbbe446a8348bf37cfa # apply pyupgrade to IPython/extensions/autoreload.py | |
|
19 | 50624b84ccdece781750f5eb635a9efbf2fe30d6 # apply black to IPython/extensions/autoreload.py | |
|
20 | b7aaa47412b96379198705955004930c57f9d74a # apply pyupgrade to IPython/extensions/autoreload.py | |
|
21 | 9c7476a88af3e567426b412f1b3c778401d8f6aa # apply black to IPython/extensions/autoreload.py |
@@ -1,19 +1,16 b'' | |||
|
1 | 1 | --- |
|
2 | 2 | name: Bug report / Question / Feature |
|
3 | 3 | about: Anything related to IPython itsel |
|
4 | 4 | title: '' |
|
5 | 5 | labels: '' |
|
6 | 6 | assignees: '' |
|
7 | 7 | |
|
8 | 8 | --- |
|
9 | 9 | |
|
10 | 10 | <!-- This is the repository for IPython command line, if you can try to make sure this question/bug/feature belong here and not on one of the Jupyter repositories. |
|
11 | 11 | |
|
12 | 12 | If it's a generic Python/Jupyter question, try other forums or discourse.jupyter.org. |
|
13 | 13 | |
|
14 | 14 | If you are unsure, it's ok to post here, though, there are few maintainer so you might not get a fast response. |
|
15 | 15 | |
|
16 | Ability of maintainers to spend time and resources on project like IPython is heavily influenced by US politics, and the current government policies have been harmful to the IPython Maintainers and Community. | |
|
17 | ||
|
18 | If you are on the fence on who to vote for or wether to vote, please cast your vote in for the democrat party in the US. | |
|
19 | 16 | --> |
@@ -1,33 +1,34 b'' | |||
|
1 | 1 | name: Run MyPy |
|
2 | 2 | |
|
3 | 3 | on: |
|
4 | 4 | push: |
|
5 | branches: [ master ] | |
|
5 | branches: [ master, 7.x] | |
|
6 | 6 | pull_request: |
|
7 | branches: [ master ] | |
|
7 | branches: [ master, 7.x] | |
|
8 | 8 | |
|
9 | 9 | jobs: |
|
10 | 10 | build: |
|
11 | 11 | |
|
12 | 12 | runs-on: ubuntu-latest |
|
13 | 13 | strategy: |
|
14 | 14 | matrix: |
|
15 | 15 | python-version: [3.8] |
|
16 | 16 | |
|
17 | 17 | steps: |
|
18 | 18 | - uses: actions/checkout@v2 |
|
19 | 19 | - name: Set up Python ${{ matrix.python-version }} |
|
20 | 20 | uses: actions/setup-python@v2 |
|
21 | 21 | with: |
|
22 | 22 | python-version: ${{ matrix.python-version }} |
|
23 | 23 | - name: Install dependencies |
|
24 | 24 | run: | |
|
25 | 25 | python -m pip install --upgrade pip |
|
26 | 26 | pip install mypy pyflakes flake8 |
|
27 | 27 | - name: Lint with mypy |
|
28 | 28 | run: | |
|
29 |
mypy IPython |
|
|
30 |
mypy IPython |
|
|
29 | mypy -p IPython.terminal | |
|
30 | mypy -p IPython.core.magics | |
|
31 | 31 | - name: Lint with pyflakes |
|
32 | 32 | run: | |
|
33 | 33 | flake8 IPython/core/magics/script.py |
|
34 | flake8 IPython/core/magics/packaging.py |
@@ -1,39 +1,39 b'' | |||
|
1 | 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions |
|
2 | 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions |
|
3 | 3 | |
|
4 | 4 | name: Python package |
|
5 | 5 | |
|
6 | 6 | on: |
|
7 | 7 | push: |
|
8 | branches: [ master ] | |
|
8 | branches: [ master, 7.x ] | |
|
9 | 9 | pull_request: |
|
10 | branches: [ master ] | |
|
10 | branches: [ master, 7.x ] | |
|
11 | 11 | |
|
12 | 12 | jobs: |
|
13 | 13 | build: |
|
14 | 14 | |
|
15 | 15 | runs-on: ubuntu-latest |
|
16 | 16 | strategy: |
|
17 | 17 | matrix: |
|
18 | 18 | python-version: [3.8] |
|
19 | 19 | |
|
20 | 20 | steps: |
|
21 | 21 | - uses: actions/checkout@v2 |
|
22 | 22 | with: |
|
23 | 23 | fetch-depth: 0 |
|
24 | 24 | - name: Set up Python ${{ matrix.python-version }} |
|
25 | 25 | uses: actions/setup-python@v2 |
|
26 | 26 | with: |
|
27 | 27 | python-version: ${{ matrix.python-version }} |
|
28 | 28 | - name: Install dependencies |
|
29 | 29 | run: | |
|
30 | 30 | python -m pip install --upgrade pip |
|
31 | pip install darker | |
|
31 | pip install darker isort | |
|
32 | 32 | - name: Lint with darker |
|
33 | 33 | run: | |
|
34 | 34 | darker -r 60625f241f298b5039cb2debc365db38aa7bb522 --check --diff . || ( |
|
35 | 35 | echo "Changes need auto-formatting. Run:" |
|
36 | 36 | echo " darker -r 60625f241f298b5039cb2debc365db38aa7bb522" |
|
37 | 37 | echo "then commit and push changes to fix." |
|
38 | 38 | exit 1 |
|
39 | 39 | ) |
@@ -1,30 +1,32 b'' | |||
|
1 | 1 | MANIFEST |
|
2 | 2 | build |
|
3 | 3 | dist |
|
4 | 4 | _build |
|
5 | 5 | docs/man/*.gz |
|
6 | 6 | docs/source/api/generated |
|
7 | 7 | docs/source/config/options |
|
8 | 8 | docs/source/config/shortcuts/*.csv |
|
9 | docs/source/savefig | |
|
9 | 10 | docs/source/interactive/magics-generated.txt |
|
10 | 11 | docs/gh-pages |
|
11 | 12 | jupyter_notebook/notebook/static/mathjax |
|
12 | 13 | jupyter_notebook/static/style/*.map |
|
13 | 14 | *.py[co] |
|
14 | 15 | __pycache__ |
|
15 | 16 | *.egg-info |
|
16 | 17 | *~ |
|
17 | 18 | *.bak |
|
18 | 19 | .ipynb_checkpoints |
|
19 | 20 | .tox |
|
20 | 21 | .DS_Store |
|
21 | 22 | \#*# |
|
22 | 23 | .#* |
|
23 | 24 | .cache |
|
24 | 25 | .coverage |
|
25 | 26 | *.swp |
|
26 | 27 | .vscode |
|
27 | 28 | .pytest_cache |
|
28 | 29 | .python-version |
|
29 | 30 | venv*/ |
|
30 | 31 | .idea/ |
|
32 | .mypy_cache/ |
@@ -1,152 +1,149 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | IPython: tools for interactive and parallel computing in Python. |
|
4 | 4 | |
|
5 | 5 | https://ipython.org |
|
6 | 6 | """ |
|
7 | 7 | #----------------------------------------------------------------------------- |
|
8 | 8 | # Copyright (c) 2008-2011, IPython Development Team. |
|
9 | 9 | # Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> |
|
10 | 10 | # Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> |
|
11 | 11 | # Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> |
|
12 | 12 | # |
|
13 | 13 | # Distributed under the terms of the Modified BSD License. |
|
14 | 14 | # |
|
15 | 15 | # The full license is in the file COPYING.txt, distributed with this software. |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | #----------------------------------------------------------------------------- |
|
19 | 19 | # Imports |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import sys |
|
24 | 24 | |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | # Setup everything |
|
27 | 27 | #----------------------------------------------------------------------------- |
|
28 | 28 | |
|
29 | 29 | # Don't forget to also update setup.py when this changes! |
|
30 | 30 | if sys.version_info < (3, 6): |
|
31 | 31 | raise ImportError( |
|
32 | 32 | """ |
|
33 | 33 | IPython 7.10+ supports Python 3.6 and above. |
|
34 | 34 | When using Python 2.7, please install IPython 5.x LTS Long Term Support version. |
|
35 | 35 | Python 3.3 and 3.4 were supported up to IPython 6.x. |
|
36 | 36 | Python 3.5 was supported with IPython 7.0 to 7.9. |
|
37 | 37 | |
|
38 | 38 | See IPython `README.rst` file for more information: |
|
39 | 39 | |
|
40 | 40 | https://github.com/ipython/ipython/blob/master/README.rst |
|
41 | 41 | |
|
42 | 42 | """) |
|
43 | 43 | |
|
44 | 44 | # Make it easy to import extensions - they are always directly on pythonpath. |
|
45 | 45 | # Therefore, non-IPython modules can be added to extensions directory. |
|
46 | 46 | # This should probably be in ipapp.py. |
|
47 | 47 | sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) |
|
48 | 48 | |
|
49 | 49 | #----------------------------------------------------------------------------- |
|
50 | 50 | # Setup the top level names |
|
51 | 51 | #----------------------------------------------------------------------------- |
|
52 | 52 | |
|
53 | 53 | from .core.getipython import get_ipython |
|
54 | 54 | from .core import release |
|
55 | 55 | from .core.application import Application |
|
56 | 56 | from .terminal.embed import embed |
|
57 | 57 | |
|
58 | 58 | from .core.interactiveshell import InteractiveShell |
|
59 | 59 | from .testing import test |
|
60 | 60 | from .utils.sysinfo import sys_info |
|
61 | 61 | from .utils.frame import extract_module_locals |
|
62 | 62 | |
|
63 | 63 | # Release data |
|
64 | 64 | __author__ = '%s <%s>' % (release.author, release.author_email) |
|
65 | 65 | __license__ = release.license |
|
66 | 66 | __version__ = release.version |
|
67 | 67 | version_info = release.version_info |
|
68 | 68 | |
|
69 | 69 | def embed_kernel(module=None, local_ns=None, **kwargs): |
|
70 | 70 | """Embed and start an IPython kernel in a given scope. |
|
71 | 71 | |
|
72 | 72 | If you don't want the kernel to initialize the namespace |
|
73 | 73 | from the scope of the surrounding function, |
|
74 | 74 | and/or you want to load full IPython configuration, |
|
75 | 75 | you probably want `IPython.start_kernel()` instead. |
|
76 | 76 | |
|
77 | 77 | Parameters |
|
78 | 78 | ---------- |
|
79 | 79 | module : types.ModuleType, optional |
|
80 | 80 | The module to load into IPython globals (default: caller) |
|
81 | 81 | local_ns : dict, optional |
|
82 | 82 | The namespace to load into IPython user namespace (default: caller) |
|
83 | ||
|
84 | kwargs : various, optional | |
|
83 | **kwargs : various, optional | |
|
85 | 84 | Further keyword args are relayed to the IPKernelApp constructor, |
|
86 | 85 | allowing configuration of the Kernel. Will only have an effect |
|
87 | 86 | on the first embed_kernel call for a given process. |
|
88 | 87 | """ |
|
89 | 88 | |
|
90 | 89 | (caller_module, caller_locals) = extract_module_locals(1) |
|
91 | 90 | if module is None: |
|
92 | 91 | module = caller_module |
|
93 | 92 | if local_ns is None: |
|
94 | 93 | local_ns = caller_locals |
|
95 | 94 | |
|
96 | 95 | # Only import .zmq when we really need it |
|
97 | 96 | from ipykernel.embed import embed_kernel as real_embed_kernel |
|
98 | 97 | real_embed_kernel(module=module, local_ns=local_ns, **kwargs) |
|
99 | 98 | |
|
100 | 99 | def start_ipython(argv=None, **kwargs): |
|
101 | 100 | """Launch a normal IPython instance (as opposed to embedded) |
|
102 | 101 | |
|
103 | 102 | `IPython.embed()` puts a shell in a particular calling scope, |
|
104 | 103 | such as a function or method for debugging purposes, |
|
105 | 104 | which is often not desirable. |
|
106 | 105 | |
|
107 | 106 | `start_ipython()` does full, regular IPython initialization, |
|
108 | 107 | including loading startup files, configuration, etc. |
|
109 | 108 | much of which is skipped by `embed()`. |
|
110 | 109 | |
|
111 | 110 | This is a public API method, and will survive implementation changes. |
|
112 | 111 | |
|
113 | 112 | Parameters |
|
114 | 113 | ---------- |
|
115 | ||
|
116 | 114 | argv : list or None, optional |
|
117 | 115 | If unspecified or None, IPython will parse command-line options from sys.argv. |
|
118 | 116 | To prevent any command-line parsing, pass an empty list: `argv=[]`. |
|
119 | 117 | user_ns : dict, optional |
|
120 | 118 | specify this dictionary to initialize the IPython user namespace with particular values. |
|
121 | kwargs : various, optional | |
|
119 | **kwargs : various, optional | |
|
122 | 120 | Any other kwargs will be passed to the Application constructor, |
|
123 | 121 | such as `config`. |
|
124 | 122 | """ |
|
125 | 123 | from IPython.terminal.ipapp import launch_new_instance |
|
126 | 124 | return launch_new_instance(argv=argv, **kwargs) |
|
127 | 125 | |
|
128 | 126 | def start_kernel(argv=None, **kwargs): |
|
129 | 127 | """Launch a normal IPython kernel instance (as opposed to embedded) |
|
130 | 128 | |
|
131 | 129 | `IPython.embed_kernel()` puts a shell in a particular calling scope, |
|
132 | 130 | such as a function or method for debugging purposes, |
|
133 | 131 | which is often not desirable. |
|
134 | 132 | |
|
135 | 133 | `start_kernel()` does full, regular IPython initialization, |
|
136 | 134 | including loading startup files, configuration, etc. |
|
137 | 135 | much of which is skipped by `embed()`. |
|
138 | 136 | |
|
139 | 137 | Parameters |
|
140 | 138 | ---------- |
|
141 | ||
|
142 | 139 | argv : list or None, optional |
|
143 | 140 | If unspecified or None, IPython will parse command-line options from sys.argv. |
|
144 | 141 | To prevent any command-line parsing, pass an empty list: `argv=[]`. |
|
145 | 142 | user_ns : dict, optional |
|
146 | 143 | specify this dictionary to initialize the IPython user namespace with particular values. |
|
147 | kwargs : various, optional | |
|
144 | **kwargs : various, optional | |
|
148 | 145 | Any other kwargs will be passed to the Application constructor, |
|
149 | 146 | such as `config`. |
|
150 | 147 | """ |
|
151 | 148 | from IPython.kernel.zmq.kernelapp import launch_new_instance |
|
152 | 149 | return launch_new_instance(argv=argv, **kwargs) |
@@ -1,464 +1,486 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | An application for IPython. |
|
4 | 4 | |
|
5 | 5 | All top-level applications should use the classes in this module for |
|
6 | 6 | handling configuration and creating configurables. |
|
7 | 7 | |
|
8 | 8 | The job of an :class:`Application` is to create the master configuration |
|
9 | 9 | object and then create the configurable objects, passing the config to them. |
|
10 | 10 | """ |
|
11 | 11 | |
|
12 | 12 | # Copyright (c) IPython Development Team. |
|
13 | 13 | # Distributed under the terms of the Modified BSD License. |
|
14 | 14 | |
|
15 | 15 | import atexit |
|
16 | 16 | from copy import deepcopy |
|
17 | 17 | import glob |
|
18 | 18 | import logging |
|
19 | 19 | import os |
|
20 | 20 | import shutil |
|
21 | 21 | import sys |
|
22 | 22 | |
|
23 | 23 | from pathlib import Path |
|
24 | 24 | |
|
25 | 25 | from traitlets.config.application import Application, catch_config_error |
|
26 | 26 | from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader |
|
27 | 27 | from IPython.core import release, crashhandler |
|
28 | 28 | from IPython.core.profiledir import ProfileDir, ProfileDirError |
|
29 | 29 | from IPython.paths import get_ipython_dir, get_ipython_package_dir |
|
30 | 30 | from IPython.utils.path import ensure_dir_exists |
|
31 | 31 | from traitlets import ( |
|
32 | 32 | List, Unicode, Type, Bool, Set, Instance, Undefined, |
|
33 | 33 | default, observe, |
|
34 | 34 | ) |
|
35 | 35 | |
|
36 | 36 | if os.name == "nt": |
|
37 | 37 | programdata = Path(os.environ.get("PROGRAMDATA", None)) |
|
38 | 38 | if programdata: |
|
39 | 39 | SYSTEM_CONFIG_DIRS = [str(programdata / "ipython")] |
|
40 | 40 | else: # PROGRAMDATA is not defined by default on XP. |
|
41 | 41 | SYSTEM_CONFIG_DIRS = [] |
|
42 | 42 | else: |
|
43 | 43 | SYSTEM_CONFIG_DIRS = [ |
|
44 | 44 | "/usr/local/etc/ipython", |
|
45 | 45 | "/etc/ipython", |
|
46 | 46 | ] |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | ENV_CONFIG_DIRS = [] |
|
50 | 50 | _env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython') |
|
51 | 51 | if _env_config_dir not in SYSTEM_CONFIG_DIRS: |
|
52 | 52 | # only add ENV_CONFIG if sys.prefix is not already included |
|
53 | 53 | ENV_CONFIG_DIRS.append(_env_config_dir) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | _envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS') |
|
57 | 57 | if _envvar in {None, ''}: |
|
58 | 58 | IPYTHON_SUPPRESS_CONFIG_ERRORS = None |
|
59 | 59 | else: |
|
60 | 60 | if _envvar.lower() in {'1','true'}: |
|
61 | 61 | IPYTHON_SUPPRESS_CONFIG_ERRORS = True |
|
62 | 62 | elif _envvar.lower() in {'0','false'} : |
|
63 | 63 | IPYTHON_SUPPRESS_CONFIG_ERRORS = False |
|
64 | 64 | else: |
|
65 | 65 | sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar ) |
|
66 | 66 | |
|
67 | 67 | # aliases and flags |
|
68 | 68 | |
|
69 | base_aliases = { | |
|
70 | 'profile-dir' : 'ProfileDir.location', | |
|
71 | 'profile' : 'BaseIPythonApplication.profile', | |
|
72 | 'ipython-dir' : 'BaseIPythonApplication.ipython_dir', | |
|
73 | 'log-level' : 'Application.log_level', | |
|
74 | 'config' : 'BaseIPythonApplication.extra_config_file', | |
|
69 | base_aliases = {} | |
|
70 | if isinstance(Application.aliases, dict): | |
|
71 | # traitlets 5 | |
|
72 | base_aliases.update(Application.aliases) | |
|
73 | base_aliases.update( | |
|
74 | { | |
|
75 | "profile-dir": "ProfileDir.location", | |
|
76 | "profile": "BaseIPythonApplication.profile", | |
|
77 | "ipython-dir": "BaseIPythonApplication.ipython_dir", | |
|
78 | "log-level": "Application.log_level", | |
|
79 | "config": "BaseIPythonApplication.extra_config_file", | |
|
75 | 80 | } |
|
81 | ) | |
|
76 | 82 | |
|
77 | base_flags = dict( | |
|
78 | debug = ({'Application' : {'log_level' : logging.DEBUG}}, | |
|
79 | "set log level to logging.DEBUG (maximize logging output)"), | |
|
80 | quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, | |
|
81 | "set log level to logging.CRITICAL (minimize logging output)"), | |
|
82 | init = ({'BaseIPythonApplication' : { | |
|
83 | 'copy_config_files' : True, | |
|
84 | 'auto_create' : True} | |
|
85 | }, """Initialize profile with default config files. This is equivalent | |
|
83 | base_flags = dict() | |
|
84 | if isinstance(Application.flags, dict): | |
|
85 | # traitlets 5 | |
|
86 | base_flags.update(Application.flags) | |
|
87 | base_flags.update( | |
|
88 | dict( | |
|
89 | debug=( | |
|
90 | {"Application": {"log_level": logging.DEBUG}}, | |
|
91 | "set log level to logging.DEBUG (maximize logging output)", | |
|
92 | ), | |
|
93 | quiet=( | |
|
94 | {"Application": {"log_level": logging.CRITICAL}}, | |
|
95 | "set log level to logging.CRITICAL (minimize logging output)", | |
|
96 | ), | |
|
97 | init=( | |
|
98 | { | |
|
99 | "BaseIPythonApplication": { | |
|
100 | "copy_config_files": True, | |
|
101 | "auto_create": True, | |
|
102 | } | |
|
103 | }, | |
|
104 | """Initialize profile with default config files. This is equivalent | |
|
86 | 105 |
|
|
87 |
|
|
|
106 | """, | |
|
107 | ), | |
|
88 | 108 | ) |
|
109 | ) | |
|
110 | ||
|
89 | 111 | |
|
90 | 112 | class ProfileAwareConfigLoader(PyFileConfigLoader): |
|
91 | 113 | """A Python file config loader that is aware of IPython profiles.""" |
|
92 | 114 | def load_subconfig(self, fname, path=None, profile=None): |
|
93 | 115 | if profile is not None: |
|
94 | 116 | try: |
|
95 | 117 | profile_dir = ProfileDir.find_profile_dir_by_name( |
|
96 | 118 | get_ipython_dir(), |
|
97 | 119 | profile, |
|
98 | 120 | ) |
|
99 | 121 | except ProfileDirError: |
|
100 | 122 | return |
|
101 | 123 | path = profile_dir.location |
|
102 | 124 | return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path) |
|
103 | 125 | |
|
104 | 126 | class BaseIPythonApplication(Application): |
|
105 | 127 | |
|
106 | 128 | name = u'ipython' |
|
107 | 129 | description = Unicode(u'IPython: an enhanced interactive Python shell.') |
|
108 | 130 | version = Unicode(release.version) |
|
109 | 131 | |
|
110 | 132 | aliases = base_aliases |
|
111 | 133 | flags = base_flags |
|
112 | 134 | classes = List([ProfileDir]) |
|
113 | 135 | |
|
114 | 136 | # enable `load_subconfig('cfg.py', profile='name')` |
|
115 | 137 | python_config_loader_class = ProfileAwareConfigLoader |
|
116 | 138 | |
|
117 | 139 | # Track whether the config_file has changed, |
|
118 | 140 | # because some logic happens only if we aren't using the default. |
|
119 | 141 | config_file_specified = Set() |
|
120 | 142 | |
|
121 | 143 | config_file_name = Unicode() |
|
122 | 144 | @default('config_file_name') |
|
123 | 145 | def _config_file_name_default(self): |
|
124 | 146 | return self.name.replace('-','_') + u'_config.py' |
|
125 | 147 | @observe('config_file_name') |
|
126 | 148 | def _config_file_name_changed(self, change): |
|
127 | 149 | if change['new'] != change['old']: |
|
128 | 150 | self.config_file_specified.add(change['new']) |
|
129 | 151 | |
|
130 | 152 | # The directory that contains IPython's builtin profiles. |
|
131 | 153 | builtin_profile_dir = Unicode( |
|
132 | 154 | os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') |
|
133 | 155 | ) |
|
134 | 156 | |
|
135 | 157 | config_file_paths = List(Unicode()) |
|
136 | 158 | @default('config_file_paths') |
|
137 | 159 | def _config_file_paths_default(self): |
|
138 | 160 | return [os.getcwd()] |
|
139 | 161 | |
|
140 | 162 | extra_config_file = Unicode( |
|
141 | 163 | help="""Path to an extra config file to load. |
|
142 | 164 | |
|
143 | 165 | If specified, load this config file in addition to any other IPython config. |
|
144 | 166 | """).tag(config=True) |
|
145 | 167 | @observe('extra_config_file') |
|
146 | 168 | def _extra_config_file_changed(self, change): |
|
147 | 169 | old = change['old'] |
|
148 | 170 | new = change['new'] |
|
149 | 171 | try: |
|
150 | 172 | self.config_files.remove(old) |
|
151 | 173 | except ValueError: |
|
152 | 174 | pass |
|
153 | 175 | self.config_file_specified.add(new) |
|
154 | 176 | self.config_files.append(new) |
|
155 | 177 | |
|
156 | 178 | profile = Unicode(u'default', |
|
157 | 179 | help="""The IPython profile to use.""" |
|
158 | 180 | ).tag(config=True) |
|
159 | 181 | |
|
160 | 182 | @observe('profile') |
|
161 | 183 | def _profile_changed(self, change): |
|
162 | 184 | self.builtin_profile_dir = os.path.join( |
|
163 | 185 | get_ipython_package_dir(), u'config', u'profile', change['new'] |
|
164 | 186 | ) |
|
165 | 187 | |
|
166 | 188 | ipython_dir = Unicode( |
|
167 | 189 | help=""" |
|
168 | 190 | The name of the IPython directory. This directory is used for logging |
|
169 | 191 | configuration (through profiles), history storage, etc. The default |
|
170 | 192 | is usually $HOME/.ipython. This option can also be specified through |
|
171 | 193 | the environment variable IPYTHONDIR. |
|
172 | 194 | """ |
|
173 | 195 | ).tag(config=True) |
|
174 | 196 | @default('ipython_dir') |
|
175 | 197 | def _ipython_dir_default(self): |
|
176 | 198 | d = get_ipython_dir() |
|
177 | 199 | self._ipython_dir_changed({ |
|
178 | 200 | 'name': 'ipython_dir', |
|
179 | 201 | 'old': d, |
|
180 | 202 | 'new': d, |
|
181 | 203 | }) |
|
182 | 204 | return d |
|
183 | 205 | |
|
184 | 206 | _in_init_profile_dir = False |
|
185 | 207 | profile_dir = Instance(ProfileDir, allow_none=True) |
|
186 | 208 | @default('profile_dir') |
|
187 | 209 | def _profile_dir_default(self): |
|
188 | 210 | # avoid recursion |
|
189 | 211 | if self._in_init_profile_dir: |
|
190 | 212 | return |
|
191 | 213 | # profile_dir requested early, force initialization |
|
192 | 214 | self.init_profile_dir() |
|
193 | 215 | return self.profile_dir |
|
194 | 216 | |
|
195 | 217 | overwrite = Bool(False, |
|
196 | 218 | help="""Whether to overwrite existing config files when copying""" |
|
197 | 219 | ).tag(config=True) |
|
198 | 220 | auto_create = Bool(False, |
|
199 | 221 | help="""Whether to create profile dir if it doesn't exist""" |
|
200 | 222 | ).tag(config=True) |
|
201 | 223 | |
|
202 | 224 | config_files = List(Unicode()) |
|
203 | 225 | @default('config_files') |
|
204 | 226 | def _config_files_default(self): |
|
205 | 227 | return [self.config_file_name] |
|
206 | 228 | |
|
207 | 229 | copy_config_files = Bool(False, |
|
208 | 230 | help="""Whether to install the default config files into the profile dir. |
|
209 | 231 | If a new profile is being created, and IPython contains config files for that |
|
210 | 232 | profile, then they will be staged into the new directory. Otherwise, |
|
211 | 233 | default config files will be automatically generated. |
|
212 | 234 | """).tag(config=True) |
|
213 | 235 | |
|
214 | 236 | verbose_crash = Bool(False, |
|
215 | 237 | help="""Create a massive crash report when IPython encounters what may be an |
|
216 | 238 | internal error. The default is to append a short message to the |
|
217 | 239 | usual traceback""").tag(config=True) |
|
218 | 240 | |
|
219 | 241 | # The class to use as the crash handler. |
|
220 | 242 | crash_handler_class = Type(crashhandler.CrashHandler) |
|
221 | 243 | |
|
222 | 244 | @catch_config_error |
|
223 | 245 | def __init__(self, **kwargs): |
|
224 | 246 | super(BaseIPythonApplication, self).__init__(**kwargs) |
|
225 | 247 | # ensure current working directory exists |
|
226 | 248 | try: |
|
227 | 249 | os.getcwd() |
|
228 | 250 | except: |
|
229 | 251 | # exit if cwd doesn't exist |
|
230 | 252 | self.log.error("Current working directory doesn't exist.") |
|
231 | 253 | self.exit(1) |
|
232 | 254 | |
|
233 | 255 | #------------------------------------------------------------------------- |
|
234 | 256 | # Various stages of Application creation |
|
235 | 257 | #------------------------------------------------------------------------- |
|
236 | 258 | |
|
237 | 259 | deprecated_subcommands = {} |
|
238 | 260 | |
|
239 | 261 | def initialize_subcommand(self, subc, argv=None): |
|
240 | 262 | if subc in self.deprecated_subcommands: |
|
241 | 263 | self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed " |
|
242 | 264 | "in future versions.".format(sub=subc)) |
|
243 | 265 | self.log.warning("You likely want to use `jupyter {sub}` in the " |
|
244 | 266 | "future".format(sub=subc)) |
|
245 | 267 | return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv) |
|
246 | 268 | |
|
247 | 269 | def init_crash_handler(self): |
|
248 | 270 | """Create a crash handler, typically setting sys.excepthook to it.""" |
|
249 | 271 | self.crash_handler = self.crash_handler_class(self) |
|
250 | 272 | sys.excepthook = self.excepthook |
|
251 | 273 | def unset_crashhandler(): |
|
252 | 274 | sys.excepthook = sys.__excepthook__ |
|
253 | 275 | atexit.register(unset_crashhandler) |
|
254 | 276 | |
|
255 | 277 | def excepthook(self, etype, evalue, tb): |
|
256 | 278 | """this is sys.excepthook after init_crashhandler |
|
257 | 279 | |
|
258 | 280 | set self.verbose_crash=True to use our full crashhandler, instead of |
|
259 | 281 | a regular traceback with a short message (crash_handler_lite) |
|
260 | 282 | """ |
|
261 | 283 | |
|
262 | 284 | if self.verbose_crash: |
|
263 | 285 | return self.crash_handler(etype, evalue, tb) |
|
264 | 286 | else: |
|
265 | 287 | return crashhandler.crash_handler_lite(etype, evalue, tb) |
|
266 | 288 | |
|
267 | 289 | @observe('ipython_dir') |
|
268 | 290 | def _ipython_dir_changed(self, change): |
|
269 | 291 | old = change['old'] |
|
270 | 292 | new = change['new'] |
|
271 | 293 | if old is not Undefined: |
|
272 | 294 | str_old = os.path.abspath(old) |
|
273 | 295 | if str_old in sys.path: |
|
274 | 296 | sys.path.remove(str_old) |
|
275 | 297 | str_path = os.path.abspath(new) |
|
276 | 298 | sys.path.append(str_path) |
|
277 | 299 | ensure_dir_exists(new) |
|
278 | 300 | readme = os.path.join(new, 'README') |
|
279 | 301 | readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') |
|
280 | 302 | if not os.path.exists(readme) and os.path.exists(readme_src): |
|
281 | 303 | shutil.copy(readme_src, readme) |
|
282 | 304 | for d in ('extensions', 'nbextensions'): |
|
283 | 305 | path = os.path.join(new, d) |
|
284 | 306 | try: |
|
285 | 307 | ensure_dir_exists(path) |
|
286 | 308 | except OSError as e: |
|
287 | 309 | # this will not be EEXIST |
|
288 | 310 | self.log.error("couldn't create path %s: %s", path, e) |
|
289 | 311 | self.log.debug("IPYTHONDIR set to: %s" % new) |
|
290 | 312 | |
|
291 | 313 | def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): |
|
292 | 314 | """Load the config file. |
|
293 | 315 | |
|
294 | 316 | By default, errors in loading config are handled, and a warning |
|
295 | 317 | printed on screen. For testing, the suppress_errors option is set |
|
296 | 318 | to False, so errors will make tests fail. |
|
297 | 319 | |
|
298 | 320 | `suppress_errors` default value is to be `None` in which case the |
|
299 | 321 | behavior default to the one of `traitlets.Application`. |
|
300 | 322 | |
|
301 | 323 | The default value can be set : |
|
302 | 324 | - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive). |
|
303 | 325 | - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive). |
|
304 | 326 | - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset. |
|
305 | 327 | |
|
306 | 328 | Any other value are invalid, and will make IPython exit with a non-zero return code. |
|
307 | 329 | """ |
|
308 | 330 | |
|
309 | 331 | |
|
310 | 332 | self.log.debug("Searching path %s for config files", self.config_file_paths) |
|
311 | 333 | base_config = 'ipython_config.py' |
|
312 | 334 | self.log.debug("Attempting to load config file: %s" % |
|
313 | 335 | base_config) |
|
314 | 336 | try: |
|
315 | 337 | if suppress_errors is not None: |
|
316 | 338 | old_value = Application.raise_config_file_errors |
|
317 | 339 | Application.raise_config_file_errors = not suppress_errors; |
|
318 | 340 | Application.load_config_file( |
|
319 | 341 | self, |
|
320 | 342 | base_config, |
|
321 | 343 | path=self.config_file_paths |
|
322 | 344 | ) |
|
323 | 345 | except ConfigFileNotFound: |
|
324 | 346 | # ignore errors loading parent |
|
325 | 347 | self.log.debug("Config file %s not found", base_config) |
|
326 | 348 | pass |
|
327 | 349 | if suppress_errors is not None: |
|
328 | 350 | Application.raise_config_file_errors = old_value |
|
329 | 351 | |
|
330 | 352 | for config_file_name in self.config_files: |
|
331 | 353 | if not config_file_name or config_file_name == base_config: |
|
332 | 354 | continue |
|
333 | 355 | self.log.debug("Attempting to load config file: %s" % |
|
334 | 356 | self.config_file_name) |
|
335 | 357 | try: |
|
336 | 358 | Application.load_config_file( |
|
337 | 359 | self, |
|
338 | 360 | config_file_name, |
|
339 | 361 | path=self.config_file_paths |
|
340 | 362 | ) |
|
341 | 363 | except ConfigFileNotFound: |
|
342 | 364 | # Only warn if the default config file was NOT being used. |
|
343 | 365 | if config_file_name in self.config_file_specified: |
|
344 | 366 | msg = self.log.warning |
|
345 | 367 | else: |
|
346 | 368 | msg = self.log.debug |
|
347 | 369 | msg("Config file not found, skipping: %s", config_file_name) |
|
348 | 370 | except Exception: |
|
349 | 371 | # For testing purposes. |
|
350 | 372 | if not suppress_errors: |
|
351 | 373 | raise |
|
352 | 374 | self.log.warning("Error loading config file: %s" % |
|
353 | 375 | self.config_file_name, exc_info=True) |
|
354 | 376 | |
|
355 | 377 | def init_profile_dir(self): |
|
356 | 378 | """initialize the profile dir""" |
|
357 | 379 | self._in_init_profile_dir = True |
|
358 | 380 | if self.profile_dir is not None: |
|
359 | 381 | # already ran |
|
360 | 382 | return |
|
361 | 383 | if 'ProfileDir.location' not in self.config: |
|
362 | 384 | # location not specified, find by profile name |
|
363 | 385 | try: |
|
364 | 386 | p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) |
|
365 | 387 | except ProfileDirError: |
|
366 | 388 | # not found, maybe create it (always create default profile) |
|
367 | 389 | if self.auto_create or self.profile == 'default': |
|
368 | 390 | try: |
|
369 | 391 | p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) |
|
370 | 392 | except ProfileDirError: |
|
371 | 393 | self.log.fatal("Could not create profile: %r"%self.profile) |
|
372 | 394 | self.exit(1) |
|
373 | 395 | else: |
|
374 | 396 | self.log.info("Created profile dir: %r"%p.location) |
|
375 | 397 | else: |
|
376 | 398 | self.log.fatal("Profile %r not found."%self.profile) |
|
377 | 399 | self.exit(1) |
|
378 | 400 | else: |
|
379 | 401 | self.log.debug("Using existing profile dir: %r"%p.location) |
|
380 | 402 | else: |
|
381 | 403 | location = self.config.ProfileDir.location |
|
382 | 404 | # location is fully specified |
|
383 | 405 | try: |
|
384 | 406 | p = ProfileDir.find_profile_dir(location, self.config) |
|
385 | 407 | except ProfileDirError: |
|
386 | 408 | # not found, maybe create it |
|
387 | 409 | if self.auto_create: |
|
388 | 410 | try: |
|
389 | 411 | p = ProfileDir.create_profile_dir(location, self.config) |
|
390 | 412 | except ProfileDirError: |
|
391 | 413 | self.log.fatal("Could not create profile directory: %r"%location) |
|
392 | 414 | self.exit(1) |
|
393 | 415 | else: |
|
394 | 416 | self.log.debug("Creating new profile dir: %r"%location) |
|
395 | 417 | else: |
|
396 | 418 | self.log.fatal("Profile directory %r not found."%location) |
|
397 | 419 | self.exit(1) |
|
398 | 420 | else: |
|
399 | 421 | self.log.info("Using existing profile dir: %r"%location) |
|
400 | 422 | # if profile_dir is specified explicitly, set profile name |
|
401 | 423 | dir_name = os.path.basename(p.location) |
|
402 | 424 | if dir_name.startswith('profile_'): |
|
403 | 425 | self.profile = dir_name[8:] |
|
404 | 426 | |
|
405 | 427 | self.profile_dir = p |
|
406 | 428 | self.config_file_paths.append(p.location) |
|
407 | 429 | self._in_init_profile_dir = False |
|
408 | 430 | |
|
409 | 431 | def init_config_files(self): |
|
410 | 432 | """[optionally] copy default config files into profile dir.""" |
|
411 | 433 | self.config_file_paths.extend(ENV_CONFIG_DIRS) |
|
412 | 434 | self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) |
|
413 | 435 | # copy config files |
|
414 | 436 | path = Path(self.builtin_profile_dir) |
|
415 | 437 | if self.copy_config_files: |
|
416 | 438 | src = self.profile |
|
417 | 439 | |
|
418 | 440 | cfg = self.config_file_name |
|
419 | 441 | if path and (path / cfg).exists(): |
|
420 | 442 | self.log.warning( |
|
421 | 443 | "Staging %r from %s into %r [overwrite=%s]" |
|
422 | 444 | % (cfg, src, self.profile_dir.location, self.overwrite) |
|
423 | 445 | ) |
|
424 | 446 | self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) |
|
425 | 447 | else: |
|
426 | 448 | self.stage_default_config_file() |
|
427 | 449 | else: |
|
428 | 450 | # Still stage *bundled* config files, but not generated ones |
|
429 | 451 | # This is necessary for `ipython profile=sympy` to load the profile |
|
430 | 452 | # on the first go |
|
431 | 453 | files = path.glob("*.py") |
|
432 | 454 | for fullpath in files: |
|
433 | 455 | cfg = fullpath.name |
|
434 | 456 | if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): |
|
435 | 457 | # file was copied |
|
436 | 458 | self.log.warning("Staging bundled %s from %s into %r"%( |
|
437 | 459 | cfg, self.profile, self.profile_dir.location) |
|
438 | 460 | ) |
|
439 | 461 | |
|
440 | 462 | |
|
441 | 463 | def stage_default_config_file(self): |
|
442 | 464 | """auto generate default config file, and stage it into the profile.""" |
|
443 | 465 | s = self.generate_config_file() |
|
444 | 466 | config_file = Path(self.profile_dir.location) / self.config_file_name |
|
445 | 467 | if self.overwrite or not config_file.exists(): |
|
446 | 468 | self.log.warning("Generating default config file: %r" % (config_file)) |
|
447 | 469 | config_file.write_text(s) |
|
448 | 470 | |
|
449 | 471 | @catch_config_error |
|
450 | 472 | def initialize(self, argv=None): |
|
451 | 473 | # don't hook up crash handler before parsing command-line |
|
452 | 474 | self.parse_command_line(argv) |
|
453 | 475 | self.init_crash_handler() |
|
454 | 476 | if self.subapp is not None: |
|
455 | 477 | # stop here if subapp is taking over |
|
456 | 478 | return |
|
457 | 479 | # save a copy of CLI config to re-load after config files |
|
458 | 480 | # so that it has highest priority |
|
459 | 481 | cl_config = deepcopy(self.config) |
|
460 | 482 | self.init_profile_dir() |
|
461 | 483 | self.init_config_files() |
|
462 | 484 | self.load_config_file() |
|
463 | 485 | # enforce cl-opts override configfile opts: |
|
464 | 486 | self.update_config(cl_config) |
|
1 | NO CONTENT: modified file |
@@ -1,188 +1,196 b'' | |||
|
1 | 1 | """Compiler tools with improved interactive support. |
|
2 | 2 | |
|
3 | 3 | Provides compilation machinery similar to codeop, but with caching support so |
|
4 | 4 | we can provide interactive tracebacks. |
|
5 | 5 | |
|
6 | 6 | Authors |
|
7 | 7 | ------- |
|
8 | 8 | * Robert Kern |
|
9 | 9 | * Fernando Perez |
|
10 | 10 | * Thomas Kluyver |
|
11 | 11 | """ |
|
12 | 12 | |
|
13 | 13 | # Note: though it might be more natural to name this module 'compiler', that |
|
14 | 14 | # name is in the stdlib and name collisions with the stdlib tend to produce |
|
15 | 15 | # weird problems (often with third-party tools). |
|
16 | 16 | |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | # Copyright (C) 2010-2011 The IPython Development Team. |
|
19 | 19 | # |
|
20 | 20 | # Distributed under the terms of the BSD License. |
|
21 | 21 | # |
|
22 | 22 | # The full license is in the file COPYING.txt, distributed with this software. |
|
23 | 23 | #----------------------------------------------------------------------------- |
|
24 | 24 | |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | # Imports |
|
27 | 27 | #----------------------------------------------------------------------------- |
|
28 | 28 | |
|
29 | 29 | # Stdlib imports |
|
30 | 30 | import __future__ |
|
31 | 31 | from ast import PyCF_ONLY_AST |
|
32 | 32 | import codeop |
|
33 | 33 | import functools |
|
34 | 34 | import hashlib |
|
35 | 35 | import linecache |
|
36 | 36 | import operator |
|
37 | 37 | import time |
|
38 | 38 | from contextlib import contextmanager |
|
39 | 39 | |
|
40 | 40 | #----------------------------------------------------------------------------- |
|
41 | 41 | # Constants |
|
42 | 42 | #----------------------------------------------------------------------------- |
|
43 | 43 | |
|
44 | 44 | # Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, |
|
45 | 45 | # this is used as a bitmask to extract future-related code flags. |
|
46 | 46 | PyCF_MASK = functools.reduce(operator.or_, |
|
47 | 47 | (getattr(__future__, fname).compiler_flag |
|
48 | 48 | for fname in __future__.all_feature_names)) |
|
49 | 49 | |
|
50 | 50 | #----------------------------------------------------------------------------- |
|
51 | 51 | # Local utilities |
|
52 | 52 | #----------------------------------------------------------------------------- |
|
53 | 53 | |
|
54 | 54 | def code_name(code, number=0): |
|
55 | 55 | """ Compute a (probably) unique name for code for caching. |
|
56 | 56 | |
|
57 | 57 | This now expects code to be unicode. |
|
58 | 58 | """ |
|
59 | 59 | hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest() |
|
60 | 60 | # Include the number and 12 characters of the hash in the name. It's |
|
61 | 61 | # pretty much impossible that in a single session we'll have collisions |
|
62 | 62 | # even with truncated hashes, and the full one makes tracebacks too long |
|
63 | 63 | return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12]) |
|
64 | 64 | |
|
65 | 65 | #----------------------------------------------------------------------------- |
|
66 | 66 | # Classes and functions |
|
67 | 67 | #----------------------------------------------------------------------------- |
|
68 | 68 | |
|
69 | 69 | class CachingCompiler(codeop.Compile): |
|
70 | 70 | """A compiler that caches code compiled from interactive statements. |
|
71 | 71 | """ |
|
72 | 72 | |
|
73 | 73 | def __init__(self): |
|
74 | 74 | codeop.Compile.__init__(self) |
|
75 | 75 | |
|
76 | 76 | # This is ugly, but it must be done this way to allow multiple |
|
77 | 77 | # simultaneous ipython instances to coexist. Since Python itself |
|
78 | 78 | # directly accesses the data structures in the linecache module, and |
|
79 | 79 | # the cache therein is global, we must work with that data structure. |
|
80 | 80 | # We must hold a reference to the original checkcache routine and call |
|
81 | 81 | # that in our own check_cache() below, but the special IPython cache |
|
82 | 82 | # must also be shared by all IPython instances. If we were to hold |
|
83 | 83 | # separate caches (one in each CachingCompiler instance), any call made |
|
84 | 84 | # by Python itself to linecache.checkcache() would obliterate the |
|
85 | 85 | # cached data from the other IPython instances. |
|
86 | 86 | if not hasattr(linecache, '_ipython_cache'): |
|
87 | 87 | linecache._ipython_cache = {} |
|
88 | 88 | if not hasattr(linecache, '_checkcache_ori'): |
|
89 | 89 | linecache._checkcache_ori = linecache.checkcache |
|
90 | 90 | # Now, we must monkeypatch the linecache directly so that parts of the |
|
91 | 91 | # stdlib that call it outside our control go through our codepath |
|
92 | 92 | # (otherwise we'd lose our tracebacks). |
|
93 | 93 | linecache.checkcache = check_linecache_ipython |
|
94 | 94 | |
|
95 | # Caching a dictionary { filename: execution_count } for nicely | |
|
96 | # rendered tracebacks. The filename corresponds to the filename | |
|
97 | # argument used for the builtins.compile function. | |
|
98 | self._filename_map = {} | |
|
95 | 99 | |
|
96 | 100 | def ast_parse(self, source, filename='<unknown>', symbol='exec'): |
|
97 | 101 | """Parse code to an AST with the current compiler flags active. |
|
98 | 102 | |
|
99 | 103 | Arguments are exactly the same as ast.parse (in the standard library), |
|
100 | 104 | and are passed to the built-in compile function.""" |
|
101 | 105 | return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1) |
|
102 | 106 | |
|
103 | 107 | def reset_compiler_flags(self): |
|
104 | 108 | """Reset compiler flags to default state.""" |
|
105 | 109 | # This value is copied from codeop.Compile.__init__, so if that ever |
|
106 | 110 | # changes, it will need to be updated. |
|
107 | 111 | self.flags = codeop.PyCF_DONT_IMPLY_DEDENT |
|
108 | 112 | |
|
109 | 113 | @property |
|
110 | 114 | def compiler_flags(self): |
|
111 | 115 | """Flags currently active in the compilation process. |
|
112 | 116 | """ |
|
113 | 117 | return self.flags |
|
114 | 118 | |
|
115 | 119 | def get_code_name(self, raw_code, transformed_code, number): |
|
116 | 120 | """Compute filename given the code, and the cell number. |
|
117 | 121 | |
|
118 | 122 | Parameters |
|
119 | 123 | ---------- |
|
120 | 124 | raw_code : str |
|
121 | 125 | The raw cell code. |
|
122 | 126 | transformed_code : str |
|
123 | 127 | The executable Python source code to cache and compile. |
|
124 | 128 | number : int |
|
125 | 129 | A number which forms part of the code's name. Used for the execution |
|
126 | 130 | counter. |
|
127 | 131 | |
|
128 | 132 | Returns |
|
129 | 133 | ------- |
|
130 | 134 | The computed filename. |
|
131 | 135 | """ |
|
132 | 136 | return code_name(transformed_code, number) |
|
133 | 137 | |
|
134 | 138 | def cache(self, transformed_code, number=0, raw_code=None): |
|
135 | 139 | """Make a name for a block of code, and cache the code. |
|
136 | 140 | |
|
137 | 141 | Parameters |
|
138 | 142 | ---------- |
|
139 | 143 | transformed_code : str |
|
140 | 144 | The executable Python source code to cache and compile. |
|
141 | 145 | number : int |
|
142 | 146 | A number which forms part of the code's name. Used for the execution |
|
143 | 147 | counter. |
|
144 | 148 | raw_code : str |
|
145 | 149 | The raw code before transformation, if None, set to `transformed_code`. |
|
146 | 150 | |
|
147 | 151 | Returns |
|
148 | 152 | ------- |
|
149 | 153 | The name of the cached code (as a string). Pass this as the filename |
|
150 | 154 | argument to compilation, so that tracebacks are correctly hooked up. |
|
151 | 155 | """ |
|
152 | 156 | if raw_code is None: |
|
153 | 157 | raw_code = transformed_code |
|
154 | 158 | |
|
155 | 159 | name = self.get_code_name(raw_code, transformed_code, number) |
|
160 | ||
|
161 | # Save the execution count | |
|
162 | self._filename_map[name] = number | |
|
163 | ||
|
156 | 164 | entry = ( |
|
157 | 165 | len(transformed_code), |
|
158 | 166 | time.time(), |
|
159 | 167 | [line + "\n" for line in transformed_code.splitlines()], |
|
160 | 168 | name, |
|
161 | 169 | ) |
|
162 | 170 | linecache.cache[name] = entry |
|
163 | 171 | linecache._ipython_cache[name] = entry |
|
164 | 172 | return name |
|
165 | 173 | |
|
166 | 174 | @contextmanager |
|
167 | 175 | def extra_flags(self, flags): |
|
168 | 176 | ## bits that we'll set to 1 |
|
169 | 177 | turn_on_bits = ~self.flags & flags |
|
170 | 178 | |
|
171 | 179 | |
|
172 | 180 | self.flags = self.flags | flags |
|
173 | 181 | try: |
|
174 | 182 | yield |
|
175 | 183 | finally: |
|
176 | 184 | # turn off only the bits we turned on so that something like |
|
177 | 185 | # __future__ that set flags stays. |
|
178 | 186 | self.flags &= ~turn_on_bits |
|
179 | 187 | |
|
180 | 188 | |
|
181 | 189 | def check_linecache_ipython(*args): |
|
182 | 190 | """Call linecache.checkcache() safely protecting our cached values. |
|
183 | 191 | """ |
|
184 | 192 | # First call the original checkcache as intended |
|
185 | 193 | linecache._checkcache_ori(*args) |
|
186 | 194 | # Then, update back the cache with our data, so that tracebacks related |
|
187 | 195 | # to our compiled codes can be produced. |
|
188 | 196 | linecache.cache.update(linecache._ipython_cache) |
@@ -1,2239 +1,2239 b'' | |||
|
1 | 1 | """Completion for IPython. |
|
2 | 2 | |
|
3 | 3 | This module started as fork of the rlcompleter module in the Python standard |
|
4 | 4 | library. The original enhancements made to rlcompleter have been sent |
|
5 | 5 | upstream and were accepted as of Python 2.3, |
|
6 | 6 | |
|
7 | 7 | This module now support a wide variety of completion mechanism both available |
|
8 | 8 | for normal classic Python code, as well as completer for IPython specific |
|
9 | 9 | Syntax like magics. |
|
10 | 10 | |
|
11 | 11 | Latex and Unicode completion |
|
12 | 12 | ============================ |
|
13 | 13 | |
|
14 | 14 | IPython and compatible frontends not only can complete your code, but can help |
|
15 | 15 | you to input a wide range of characters. In particular we allow you to insert |
|
16 | 16 | a unicode character using the tab completion mechanism. |
|
17 | 17 | |
|
18 | 18 | Forward latex/unicode completion |
|
19 | 19 | -------------------------------- |
|
20 | 20 | |
|
21 | 21 | Forward completion allows you to easily type a unicode character using its latex |
|
22 | 22 | name, or unicode long description. To do so type a backslash follow by the |
|
23 | 23 | relevant name and press tab: |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | Using latex completion: |
|
27 | 27 | |
|
28 | 28 | .. code:: |
|
29 | 29 | |
|
30 | 30 | \\alpha<tab> |
|
31 | 31 | α |
|
32 | 32 | |
|
33 | 33 | or using unicode completion: |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | .. code:: |
|
37 | 37 | |
|
38 | 38 | \\GREEK SMALL LETTER ALPHA<tab> |
|
39 | 39 | α |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | Only valid Python identifiers will complete. Combining characters (like arrow or |
|
43 | 43 | dots) are also available, unlike latex they need to be put after the their |
|
44 | 44 | counterpart that is to say, `F\\\\vec<tab>` is correct, not `\\\\vec<tab>F`. |
|
45 | 45 | |
|
46 | 46 | Some browsers are known to display combining characters incorrectly. |
|
47 | 47 | |
|
48 | 48 | Backward latex completion |
|
49 | 49 | ------------------------- |
|
50 | 50 | |
|
51 | 51 | It is sometime challenging to know how to type a character, if you are using |
|
52 | 52 | IPython, or any compatible frontend you can prepend backslash to the character |
|
53 | 53 | and press `<tab>` to expand it to its latex form. |
|
54 | 54 | |
|
55 | 55 | .. code:: |
|
56 | 56 | |
|
57 | 57 | \\α<tab> |
|
58 | 58 | \\alpha |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | Both forward and backward completions can be deactivated by setting the |
|
62 | 62 | ``Completer.backslash_combining_completions`` option to ``False``. |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | Experimental |
|
66 | 66 | ============ |
|
67 | 67 | |
|
68 | 68 | Starting with IPython 6.0, this module can make use of the Jedi library to |
|
69 | 69 | generate completions both using static analysis of the code, and dynamically |
|
70 | 70 | inspecting multiple namespaces. Jedi is an autocompletion and static analysis |
|
71 | 71 | for Python. The APIs attached to this new mechanism is unstable and will |
|
72 | 72 | raise unless use in an :any:`provisionalcompleter` context manager. |
|
73 | 73 | |
|
74 | 74 | You will find that the following are experimental: |
|
75 | 75 | |
|
76 | 76 | - :any:`provisionalcompleter` |
|
77 | 77 | - :any:`IPCompleter.completions` |
|
78 | 78 | - :any:`Completion` |
|
79 | 79 | - :any:`rectify_completions` |
|
80 | 80 | |
|
81 | 81 | .. note:: |
|
82 | 82 | |
|
83 | 83 | better name for :any:`rectify_completions` ? |
|
84 | 84 | |
|
85 | 85 | We welcome any feedback on these new API, and we also encourage you to try this |
|
86 | 86 | module in debug mode (start IPython with ``--Completer.debug=True``) in order |
|
87 | 87 | to have extra logging information if :any:`jedi` is crashing, or if current |
|
88 | 88 | IPython completer pending deprecations are returning results not yet handled |
|
89 | 89 | by :any:`jedi` |
|
90 | 90 | |
|
91 | 91 | Using Jedi for tab completion allow snippets like the following to work without |
|
92 | 92 | having to execute any code: |
|
93 | 93 | |
|
94 | 94 | >>> myvar = ['hello', 42] |
|
95 | 95 | ... myvar[1].bi<tab> |
|
96 | 96 | |
|
97 | 97 | Tab completion will be able to infer that ``myvar[1]`` is a real number without |
|
98 | 98 | executing any code unlike the previously available ``IPCompleter.greedy`` |
|
99 | 99 | option. |
|
100 | 100 | |
|
101 | 101 | Be sure to update :any:`jedi` to the latest stable version or to try the |
|
102 | 102 | current development version to get better completions. |
|
103 | 103 | """ |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | # Copyright (c) IPython Development Team. |
|
107 | 107 | # Distributed under the terms of the Modified BSD License. |
|
108 | 108 | # |
|
109 | 109 | # Some of this code originated from rlcompleter in the Python standard library |
|
110 | 110 | # Copyright (C) 2001 Python Software Foundation, www.python.org |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | import builtins as builtin_mod |
|
114 | 114 | import glob |
|
115 | 115 | import inspect |
|
116 | 116 | import itertools |
|
117 | 117 | import keyword |
|
118 | 118 | import os |
|
119 | 119 | import re |
|
120 | 120 | import string |
|
121 | 121 | import sys |
|
122 | 122 | import time |
|
123 | 123 | import unicodedata |
|
124 | 124 | import uuid |
|
125 | 125 | import warnings |
|
126 | 126 | from contextlib import contextmanager |
|
127 | 127 | from importlib import import_module |
|
128 | 128 | from types import SimpleNamespace |
|
129 | 129 | from typing import Iterable, Iterator, List, Tuple, Union, Any, Sequence, Dict, NamedTuple, Pattern, Optional |
|
130 | 130 | |
|
131 | 131 | from IPython.core.error import TryNext |
|
132 | 132 | from IPython.core.inputtransformer2 import ESC_MAGIC |
|
133 | 133 | from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol |
|
134 | 134 | from IPython.core.oinspect import InspectColors |
|
135 | 135 | from IPython.utils import generics |
|
136 | 136 | from IPython.utils.dir2 import dir2, get_real_method |
|
137 | 137 | from IPython.utils.path import ensure_dir_exists |
|
138 | 138 | from IPython.utils.process import arg_split |
|
139 | 139 | from traitlets import Bool, Enum, Int, List as ListTrait, Unicode, default, observe |
|
140 | 140 | from traitlets.config.configurable import Configurable |
|
141 | 141 | |
|
142 | 142 | import __main__ |
|
143 | 143 | |
|
144 | 144 | # skip module docstests |
|
145 | 145 | skip_doctest = True |
|
146 | 146 | |
|
147 | 147 | try: |
|
148 | 148 | import jedi |
|
149 | 149 | jedi.settings.case_insensitive_completion = False |
|
150 | 150 | import jedi.api.helpers |
|
151 | 151 | import jedi.api.classes |
|
152 | 152 | JEDI_INSTALLED = True |
|
153 | 153 | except ImportError: |
|
154 | 154 | JEDI_INSTALLED = False |
|
155 | 155 | #----------------------------------------------------------------------------- |
|
156 | 156 | # Globals |
|
157 | 157 | #----------------------------------------------------------------------------- |
|
158 | 158 | |
|
159 | 159 | # ranges where we have most of the valid unicode names. We could be more finer |
|
160 | 160 | # grained but is it worth it for performace While unicode have character in the |
|
161 | 161 | # rage 0, 0x110000, we seem to have name for about 10% of those. (131808 as I |
|
162 | 162 | # write this). With below range we cover them all, with a density of ~67% |
|
163 | 163 | # biggest next gap we consider only adds up about 1% density and there are 600 |
|
164 | 164 | # gaps that would need hard coding. |
|
165 | 165 | _UNICODE_RANGES = [(32, 0x3134b), (0xe0001, 0xe01f0)] |
|
166 | 166 | |
|
167 | 167 | # Public API |
|
168 | 168 | __all__ = ['Completer','IPCompleter'] |
|
169 | 169 | |
|
170 | 170 | if sys.platform == 'win32': |
|
171 | 171 | PROTECTABLES = ' ' |
|
172 | 172 | else: |
|
173 | 173 | PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' |
|
174 | 174 | |
|
175 | 175 | # Protect against returning an enormous number of completions which the frontend |
|
176 | 176 | # may have trouble processing. |
|
177 | 177 | MATCHES_LIMIT = 500 |
|
178 | 178 | |
|
179 | 179 | _deprecation_readline_sentinel = object() |
|
180 | 180 | |
|
181 | 181 | |
|
182 | 182 | class ProvisionalCompleterWarning(FutureWarning): |
|
183 | 183 | """ |
|
184 | 184 | Exception raise by an experimental feature in this module. |
|
185 | 185 | |
|
186 | 186 | Wrap code in :any:`provisionalcompleter` context manager if you |
|
187 | 187 | are certain you want to use an unstable feature. |
|
188 | 188 | """ |
|
189 | 189 | pass |
|
190 | 190 | |
|
191 | 191 | warnings.filterwarnings('error', category=ProvisionalCompleterWarning) |
|
192 | 192 | |
|
193 | 193 | @contextmanager |
|
194 | 194 | def provisionalcompleter(action='ignore'): |
|
195 | 195 | """ |
|
196 | 196 | This context manager has to be used in any place where unstable completer |
|
197 | 197 | behavior and API may be called. |
|
198 | 198 | |
|
199 | 199 | >>> with provisionalcompleter(): |
|
200 | 200 | ... completer.do_experimental_things() # works |
|
201 | 201 | |
|
202 | 202 | >>> completer.do_experimental_things() # raises. |
|
203 | 203 | |
|
204 | 204 | .. note:: |
|
205 | 205 | |
|
206 | 206 | Unstable |
|
207 | 207 | |
|
208 | 208 | By using this context manager you agree that the API in use may change |
|
209 | 209 | without warning, and that you won't complain if they do so. |
|
210 | 210 | |
|
211 | 211 | You also understand that, if the API is not to your liking, you should report |
|
212 | 212 | a bug to explain your use case upstream. |
|
213 | 213 | |
|
214 | 214 | We'll be happy to get your feedback, feature requests, and improvements on |
|
215 | 215 | any of the unstable APIs! |
|
216 | 216 | """ |
|
217 | 217 | with warnings.catch_warnings(): |
|
218 | 218 | warnings.filterwarnings(action, category=ProvisionalCompleterWarning) |
|
219 | 219 | yield |
|
220 | 220 | |
|
221 | 221 | |
|
222 | 222 | def has_open_quotes(s): |
|
223 | 223 | """Return whether a string has open quotes. |
|
224 | 224 | |
|
225 | 225 | This simply counts whether the number of quote characters of either type in |
|
226 | 226 | the string is odd. |
|
227 | 227 | |
|
228 | 228 | Returns |
|
229 | 229 | ------- |
|
230 | 230 | If there is an open quote, the quote character is returned. Else, return |
|
231 | 231 | False. |
|
232 | 232 | """ |
|
233 | 233 | # We check " first, then ', so complex cases with nested quotes will get |
|
234 | 234 | # the " to take precedence. |
|
235 | 235 | if s.count('"') % 2: |
|
236 | 236 | return '"' |
|
237 | 237 | elif s.count("'") % 2: |
|
238 | 238 | return "'" |
|
239 | 239 | else: |
|
240 | 240 | return False |
|
241 | 241 | |
|
242 | 242 | |
|
243 | 243 | def protect_filename(s, protectables=PROTECTABLES): |
|
244 | 244 | """Escape a string to protect certain characters.""" |
|
245 | 245 | if set(s) & set(protectables): |
|
246 | 246 | if sys.platform == "win32": |
|
247 | 247 | return '"' + s + '"' |
|
248 | 248 | else: |
|
249 | 249 | return "".join(("\\" + c if c in protectables else c) for c in s) |
|
250 | 250 | else: |
|
251 | 251 | return s |
|
252 | 252 | |
|
253 | 253 | |
|
254 | 254 | def expand_user(path:str) -> Tuple[str, bool, str]: |
|
255 | 255 | """Expand ``~``-style usernames in strings. |
|
256 | 256 | |
|
257 | 257 | This is similar to :func:`os.path.expanduser`, but it computes and returns |
|
258 | 258 | extra information that will be useful if the input was being used in |
|
259 | 259 | computing completions, and you wish to return the completions with the |
|
260 | 260 | original '~' instead of its expanded value. |
|
261 | 261 | |
|
262 | 262 | Parameters |
|
263 | 263 | ---------- |
|
264 | 264 | path : str |
|
265 | 265 | String to be expanded. If no ~ is present, the output is the same as the |
|
266 | 266 | input. |
|
267 | 267 | |
|
268 | 268 | Returns |
|
269 | 269 | ------- |
|
270 | 270 | newpath : str |
|
271 | 271 | Result of ~ expansion in the input path. |
|
272 | 272 | tilde_expand : bool |
|
273 | 273 | Whether any expansion was performed or not. |
|
274 | 274 | tilde_val : str |
|
275 | 275 | The value that ~ was replaced with. |
|
276 | 276 | """ |
|
277 | 277 | # Default values |
|
278 | 278 | tilde_expand = False |
|
279 | 279 | tilde_val = '' |
|
280 | 280 | newpath = path |
|
281 | 281 | |
|
282 | 282 | if path.startswith('~'): |
|
283 | 283 | tilde_expand = True |
|
284 | 284 | rest = len(path)-1 |
|
285 | 285 | newpath = os.path.expanduser(path) |
|
286 | 286 | if rest: |
|
287 | 287 | tilde_val = newpath[:-rest] |
|
288 | 288 | else: |
|
289 | 289 | tilde_val = newpath |
|
290 | 290 | |
|
291 | 291 | return newpath, tilde_expand, tilde_val |
|
292 | 292 | |
|
293 | 293 | |
|
294 | 294 | def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str: |
|
295 | 295 | """Does the opposite of expand_user, with its outputs. |
|
296 | 296 | """ |
|
297 | 297 | if tilde_expand: |
|
298 | 298 | return path.replace(tilde_val, '~') |
|
299 | 299 | else: |
|
300 | 300 | return path |
|
301 | 301 | |
|
302 | 302 | |
|
def completions_sorting_key(word):
    """Sort key for completion candidates.

    Produces a ``(underscore_rank, word, magic_rank)`` tuple so that:

    - completions starting with underscores sort after everything else
      (double underscore after single underscore),
    - keyword-argument completions (ending in ``=``) sort first,
    - ``%magic`` and ``%%cellmagic`` completions are interleaved
      alphabetically by their bare name, with the prefix stripped.
    """
    underscore_rank = 0
    magic_rank = 0

    # Demote private and dunder names to the end of the list.
    if word.startswith('__'):
        underscore_rank = 2
    elif word.startswith('_'):
        underscore_rank = 1

    # Keyword-argument completions float to the very front.
    if word.endswith('='):
        underscore_rank = -1

    if word.startswith('%%'):
        # A further '%' inside means this is not a plain cell magic name;
        # leave such words untouched.
        if '%' not in word[2:]:
            word = word[2:]
            magic_rank = 2
    elif word.startswith('%'):
        if '%' not in word[1:]:
            word = word[1:]
            magic_rank = 1

    return underscore_rank, word, magic_rank
|
333 | 333 | |
|
334 | 334 | |
|
335 | 335 | class _FakeJediCompletion: |
|
336 | 336 | """ |
|
337 | 337 | This is a workaround to communicate to the UI that Jedi has crashed and to |
|
338 | 338 | report a bug. Will be used only id :any:`IPCompleter.debug` is set to true. |
|
339 | 339 | |
|
340 | 340 | Added in IPython 6.0 so should likely be removed for 7.0 |
|
341 | 341 | |
|
342 | 342 | """ |
|
343 | 343 | |
|
344 | 344 | def __init__(self, name): |
|
345 | 345 | |
|
346 | 346 | self.name = name |
|
347 | 347 | self.complete = name |
|
348 | 348 | self.type = 'crashed' |
|
349 | 349 | self.name_with_symbols = name |
|
350 | 350 | self.signature = '' |
|
351 | 351 | self._origin = 'fake' |
|
352 | 352 | |
|
353 | 353 | def __repr__(self): |
|
354 | 354 | return '<Fake completion object jedi has crashed>' |
|
355 | 355 | |
|
356 | 356 | |
|
class Completion:
    """
    Completion object used and returned by IPython completers.

    .. warning::

        Unstable

        This function is unstable, API may change without warning.
        It will also raise unless use in proper context manager.

    This act as a middle ground :any:`Completion` object between the
    :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion
    object. While Jedi need a lot of information about evaluator and how the
    code should be ran/inspected, PromptToolkit (and other frontend) mostly
    need user facing information.

    - Which range should be replaced by what.
    - Some metadata (like completion type), or meta information to display to
      the user.

    For debugging purpose we can also store the origin of the completion (``jedi``,
    ``IPython.python_matches``, ``IPython.magics_matches``...).
    """

    __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin']

    def __init__(self, start: int, end: int, text: str, *, type: Optional[str] = None, _origin: str = '', signature: str = '') -> None:
        # Provisional API: warn on every construction so consumers opt in
        # explicitly (e.g. via the provisionalcompleter context manager).
        warnings.warn("``Completion`` is a provisional API (as of IPython 6.0). "
                      "It may change without warnings. "
                      "Use in corresponding context manager.",
                      category=ProvisionalCompleterWarning, stacklevel=2)

        self.start = start          # offset in the text where the replacement starts
        self.end = end              # offset where the replacement ends
        self.text = text            # the replacement text itself
        self.type = type            # completion type, or None if unknown
        self.signature = signature  # callable signature, '' when not applicable
        self._origin = _origin      # which completer produced this (debug aid)

    def __repr__(self):
        return '<Completion start=%s end=%s text=%r type=%r, signature=%r,>' % \
            (self.start, self.end, self.text, self.type or '?', self.signature or '?')

    def __eq__(self, other) -> bool:
        """
        Equality and hash do not hash the type (as some completer may not be
        able to infer the type), but are use to (partially) de-duplicate
        completion.

        Completely de-duplicating completion is a bit tricker that just
        comparing as it depends on surrounding text, which Completions are not
        aware of.
        """
        # Fixed: the return annotation previously said ``Bool`` — the
        # traitlets trait class — where the builtin ``bool`` was meant.
        return self.start == other.start and \
            self.end == other.end and \
            self.text == other.text

    def __hash__(self):
        return hash((self.start, self.end, self.text))
|
417 | 417 | |
|
418 | 418 | |
|
# Shorthand for the iterable-of-Completion type used by the helpers below.
_IC = Iterable[Completion]
|
420 | 420 | |
|
421 | 421 | |
|
def _deduplicate_completions(text: str, completions: _IC)-> _IC:
    """
    Drop completions whose effect on ``text`` duplicates an earlier one.

    .. warning::

        Unstable

        This function is unstable, API may change without warning.

    Parameters
    ----------
    text: str
        text that should be completed.
    completions: Iterator[Completion]
        iterator over the completions to deduplicate

    Yields
    ------
    `Completions` objects
        Completions coming from multiple sources, may be different but end up having
        the same effect when applied to ``text``. If this is the case, this will
        consider completions as equal and only emit the first encountered.
        Not folded in `completions()` yet for debugging purpose, and to detect when
        the IPython completer does return things that Jedi does not, but should be
        at some point.
    """
    all_completions = list(completions)
    if not all_completions:
        return

    # Widest span any completion touches; applying each completion over this
    # common span yields directly comparable result strings.
    span_start = min(comp.start for comp in all_completions)
    span_end = max(comp.end for comp in all_completions)

    emitted = set()
    for comp in all_completions:
        effective = text[span_start:comp.start] + comp.text + text[comp.end:span_end]
        if effective in emitted:
            continue
        emitted.add(effective)
        yield comp
|
462 | 462 | |
|
463 | 463 | |
|
def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC:
    """
    Rewrite a set of completions so they all share one ``start`` and ``end``.

    .. warning::

        Unstable

        This function is unstable, API may change without warning.
        It will also raise unless use in proper context manager.

    Parameters
    ----------
    text: str
        text that should be completed.
    completions: Iterator[Completion]
        iterator over the completions to rectify

    Notes
    -----
    :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though
    the Jupyter Protocol requires them to behave like so. This will readjust
    the completion to have the same ``start`` and ``end`` by padding both
    extremities with surrounding text.

    During stabilisation should support a ``_debug`` option to log which
    completion are return by the IPython completer and not found in Jedi in
    order to make upstream bug report.
    """
    warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). "
                  "It may change without warnings. "
                  "Use in corresponding context manager.",
                  category=ProvisionalCompleterWarning, stacklevel=2)

    all_comps = list(completions)
    if not all_comps:
        return

    # Common replacement span covering every completion.
    unified_start = min(comp.start for comp in all_comps)
    unified_end = max(comp.end for comp in all_comps)

    # Track what each source produced so extras can be reported in debug mode.
    jedi_texts = set()
    python_match_texts = set()
    for comp in all_comps:
        unified_text = text[unified_start:comp.start] + comp.text + text[comp.end:unified_end]
        if comp._origin == 'jedi':
            jedi_texts.add(unified_text)
        elif comp._origin == 'IPCompleter.python_matches':
            python_match_texts.add(unified_text)
        yield Completion(unified_start, unified_end, unified_text, type=comp.type,
                         _origin=comp._origin, signature=comp.signature)

    extras = python_match_texts.difference(jedi_texts)
    if extras and _debug:
        print('IPython.python matches have extras:', extras)
|
519 | 519 | |
|
520 | 520 | |
|
# Characters that end a completable "word".  The Windows set omits ':' and
# '\\' — presumably so drive letters and path separators stay inside a single
# token — TODO confirm against the Windows completion tests.
if sys.platform == 'win32':
    DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?'
else:
    DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?'

# Far smaller delimiter set used when greedy completion is enabled, so dotted
# and indexed expressions survive as one token (and may then be evaluated).
GREEDY_DELIMS = ' =\r\n'
|
527 | 527 | |
|
528 | 528 | |
|
class CompletionSplitter(object):
    """Split an input line the way readline would, for completion purposes.

    Owning our own implementation lets every frontend get identical
    readline-like behaviour.  Given a line of text and the cursor position on
    that line, :any:`split_line` returns the 'word' under the cursor that
    should be completed.

    The delimiter characters are controlled through the ``delims`` property,
    which transparently rebuilds the internal splitting regular expression.
    """

    # -- Private interface ---------------------------------------------------

    # Default delimiter characters, suitable for IPython's typical usage.
    _delims = DELIMS

    # Source pattern (plain string) for the splitting regex; kept around
    # because this kind of code is tricky to debug without it.
    _delim_expr = None

    # Compiled regular expression that performs the split.
    _delim_re = None

    def __init__(self, delims=None):
        self.delims = CompletionSplitter._delims if delims is None else delims

    @property
    def delims(self):
        """Return the string of delimiter characters."""
        return self._delims

    @delims.setter
    def delims(self, delims):
        """Set the delimiters for line splitting (rebuilds the regex)."""
        expr = '[' + ''.join('\\' + ch for ch in delims) + ']'
        self._delim_re = re.compile(expr)
        self._delims = delims
        self._delim_expr = expr

    def split_line(self, line, cursor_pos=None):
        """Return the token being completed at ``cursor_pos`` in ``line``."""
        fragment = line if cursor_pos is None else line[:cursor_pos]
        return self._delim_re.split(fragment)[-1]
|
578 | 578 | |
|
579 | 579 | |
|
580 | 580 | |
|
class Completer(Configurable):
    """Base completion machinery: name and attribute matching in namespaces.

    Provides a readline-style ``complete(text, state)`` entry point built on
    :any:`global_matches` (bare names) and :any:`attr_matches` (dotted
    expressions).  Subclasses add further matchers.
    """

    greedy = Bool(False,
        help="""Activate greedy completion
        PENDING DEPRECTION. this is now mostly taken care of with Jedi.

        This will enable completion on elements of lists, results of function calls, etc.,
        but can be unsafe because the code is actually evaluated on TAB.
        """
    ).tag(config=True)

    use_jedi = Bool(default_value=JEDI_INSTALLED,
        help="Experimental: Use Jedi to generate autocompletions. "
             "Default to True if jedi is installed.").tag(config=True)

    jedi_compute_type_timeout = Int(default_value=400,
        help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types.
        Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt
        performance by preventing jedi to build its cache.
        """).tag(config=True)

    debug = Bool(default_value=False,
        help='Enable debug for the Completer. Mostly print extra '
             'information for experimental jedi integration.')\
             .tag(config=True)

    backslash_combining_completions = Bool(True,
        help="Enable unicode completions, e.g. \\alpha<tab> . "
             "Includes completion of latex commands, unicode names, and expanding "
             "unicode characters back to latex commands.").tag(config=True)

    def __init__(self, namespace=None, global_namespace=None, **kwargs):
        """Create a new completer for the command line.

        Completer(namespace=ns, global_namespace=ns2) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given.  This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = True
        else:
            self.use_main_ns = False
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

        # Extra matcher callables third parties may append to.
        self.custom_matchers = []

        super(Completer, self).__init__(**kwargs)

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None.  The completion should begin with 'text'.

        """
        # Late-bind to __main__ so completions track the live session.
        if self.use_main_ns:
            self.namespace = __main__.__dict__

        # state == 0 is the first call for this text: (re)compute the matches.
        if state == 0:
            if "." in text:
                self.matches = self.attr_matches(text)
            else:
                self.matches = self.global_matches(text)
        try:
            return self.matches[state]
        except IndexError:
            # Matches exhausted; None ends the readline iteration.
            return None

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.

        """
        matches = []
        match_append = matches.append
        n = len(text)
        # Plain prefix matching over keywords, builtins and both namespaces.
        for lst in [keyword.kwlist,
                    builtin_mod.__dict__.keys(),
                    self.namespace.keys(),
                    self.global_namespace.keys()]:
            for word in lst:
                if word[:n] == text and word != "__builtins__":
                    match_append(word)

        # Additionally match snake_case names by their initials, e.g. typing
        # ``fb`` offers ``foo_bar``.  Only names containing at least one
        # underscore-separated part qualify (see the regex).
        snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z")
        for lst in [self.namespace.keys(),
                    self.global_namespace.keys()]:
            shortened = {"_".join([sub[0] for sub in word.split('_')]) : word
                         for word in lst if snake_case_re.match(word)}
            for word in shortened.keys():
                if word[:n] == text and word != "__builtins__":
                    match_append(shortened[word])
        return matches

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions.  (For class instances, class members are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.

        """

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)

        if m:
            expr, attr = m.group(1, 3)
        elif self.greedy:
            # NOTE(review): self.line_buffer is assigned by the surrounding
            # completion machinery, not in this class — confirm before reuse.
            m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer)
            if not m2:
                return []
            expr, attr = m2.group(1,2)
        else:
            return []

        # Evaluating the expression may run arbitrary user code (greedy mode).
        try:
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        # NOTE(review): limit_to__all__ appears to be declared on a subclass
        # (IPCompleter); this base method assumes it exists — confirm.
        if self.limit_to__all__ and hasattr(obj, '__all__'):
            words = get__all__entries(obj)
        else:
            words = dir2(obj)

        # Give extension hooks a chance to edit the candidate list.
        try:
            words = generics.complete_object(obj, words)
        except TryNext:
            pass
        except AssertionError:
            raise
        except Exception:
            # Silence errors from completion function
            #raise # dbg
            pass
        # Build match list to return
        n = len(attr)
        return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ]
|
748 | 748 | |
|
749 | 749 | |
|
def get__all__entries(obj):
    """Return the string entries of ``obj.__all__``.

    Returns an empty list when ``__all__`` is absent or unusable (not
    iterable), and silently drops non-string entries.
    """
    try:
        words = getattr(obj, '__all__')
    except Exception:
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt and
        # SystemExit; a broken __getattr__ may raise anything else, so keep
        # treating any Exception as "no __all__".
        return []

    try:
        return [w for w in words if isinstance(w, str)]
    except TypeError:
        # __all__ exists but is not iterable — treat it as unusable rather
        # than crashing the completer.
        return []
|
758 | 758 | |
|
759 | 759 | |
|
def match_dict_keys(keys: List[Union[str, bytes, Tuple[Union[str, bytes]]]], prefix: str, delims: str,
                    extra_prefix: Optional[Tuple[str, bytes]]=None) -> Tuple[str, int, List[str]]:
    """Used by dict_key_matches, matching the prefix to a list of keys

    Parameters
    ----------
    keys
        list of keys in dictionary currently being completed.
    prefix
        Part of the text already typed by the user. E.g. `mydict[b'fo`
    delims
        String of delimiters to consider when finding the current key.
    extra_prefix : optional
        Part of the text already typed in multi-key index cases. E.g. for
        `mydict['foo', "bar", 'b`, this would be `('foo', 'bar')`.

    Returns
    -------
    A tuple of three elements: ``quote``, ``token_start``, ``matched``, with
    ``quote`` being the quote that need to be used to close current string.
    ``token_start`` the position where the replacement should start occurring,
    ``matches`` a list of replacement/completion

    """
    prefix_tuple = extra_prefix if extra_prefix else ()
    Nprefix = len(prefix_tuple)

    def filter_prefix_tuple(key):
        # Accept only tuple keys that extend the already-typed prefix tuple.
        # Reject too short keys
        if len(key) <= Nprefix:
            return False
        # Reject keys with non str/bytes in it
        for k in key:
            if not isinstance(k, (str, bytes)):
                return False
        # Reject keys that do not match the prefix
        for k, pt in zip(key, prefix_tuple):
            if k != pt:
                return False
        # All checks passed!
        return True

    filtered_keys:List[Union[str,bytes]] = []
    def _add_to_filtered_keys(key):
        # Only str/bytes keys are completable; everything else is dropped.
        if isinstance(key, (str, bytes)):
            filtered_keys.append(key)

    # Flatten: for tuple keys matching the prefix tuple, the candidate is the
    # element at the current index position; scalar keys pass through.
    for k in keys:
        if isinstance(k, tuple):
            if filter_prefix_tuple(k):
                _add_to_filtered_keys(k[Nprefix])
        else:
            _add_to_filtered_keys(k)

    # No partial key typed yet: offer the repr of every candidate as-is.
    if not prefix:
        return '', 0, [repr(k) for k in filtered_keys]

    # The quote character the user opened the key string with (' or ").
    quote_match = re.search('["\']', prefix)
    assert quote_match is not None  # silence mypy
    quote = quote_match.group()
    # Close the typed fragment with the same quote and evaluate it to get the
    # literal (str or bytes) prefix the user has entered so far.
    try:
        prefix_str = eval(prefix + quote, {})
    except Exception:
        return '', 0, []

    # Locate the start of the current token: the longest delimiter-free
    # suffix of the typed prefix.
    pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
    token_match = re.search(pattern, prefix, re.UNICODE)
    assert token_match is not None  # silence mypy
    token_start = token_match.start()
    token_prefix = token_match.group()

    matched:List[str] = []
    for key in filtered_keys:
        try:
            if not key.startswith(prefix_str):
                continue
        except (AttributeError, TypeError, UnicodeError):
            # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
            continue

        # reformat remainder of key to begin with prefix
        rem = key[len(prefix_str):]
        # force repr wrapped in ' (append a '"' so repr prefers single quotes,
        # then strip the wrapping quote characters and that sentinel)
        rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"')
        rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
        if quote == '"':
            # The entered prefix is quoted with ",
            # but the match is quoted with '.
            # A contained " hence needs escaping for comparison:
            rem_repr = rem_repr.replace('"', '\\"')

        # then reinsert prefix from start of token
        matched.append('%s%s' % (token_prefix, rem_repr))
    return quote, token_start, matched
|
852 | 852 | |
|
853 | 853 | |
|
def cursor_to_position(text:str, line:int, column:int)->int:
    """
    Convert a ``(line, column)`` cursor location in *text* to a flat offset.

    Parameters
    ----------
    text : str
        The text in which to calculate the cursor offset
    line : int
        Line of the cursor; 0-indexed
    column : int
        Column of the cursor 0-indexed

    Returns
    -------
    Position of the cursor in ``text``, 0-indexed.

    See Also
    --------
    position_to_cursor : reciprocal of this function
    """
    split_lines = text.split('\n')
    assert line <= len(split_lines), '{} <= {}'.format(str(line), str(len(split_lines)))

    offset = column
    for prior in split_lines[:line]:
        # +1 accounts for the '\n' that terminated each earlier line.
        offset += len(prior) + 1
    return offset
|
881 | 881 | |
|
def position_to_cursor(text:str, offset:int)->Tuple[int, int]:
    """
    Convert a flat 0-indexed *offset* in *text* to a 0-indexed
    ``(line, column)`` pair.

    ``offset`` must be a valid position in ``text``.

    Parameters
    ----------
    text : str
        The text in which to calculate the cursor offset
    offset : int
        Position of the cursor in ``text``, 0-indexed.

    Returns
    -------
    (line, column) : (int, int)
        Line of the cursor; 0-indexed, column of the cursor 0-indexed

    See Also
    --------
    cursor_to_position : reciprocal of this function
    """

    assert 0 <= offset <= len(text) , "0 <= %s <= %s" % (offset , len(text))

    consumed = text[:offset]
    # Line index equals the number of newlines before the offset; the column
    # is the length of whatever follows the last of those newlines.
    line = consumed.count('\n')
    col = len(consumed.rsplit('\n', 1)[-1])
    return line, col
|
914 | 914 | |
|
915 | 915 | |
|
916 | 916 | def _safe_isinstance(obj, module, class_name): |
|
917 | 917 | """Checks if obj is an instance of module.class_name if loaded |
|
918 | 918 | """ |
|
919 | 919 | return (module in sys.modules and |
|
920 | 920 | isinstance(obj, getattr(import_module(module), class_name))) |
|
921 | 921 | |
|
def back_unicode_name_matches(text:str) -> Tuple[str, Sequence[str]]:
    """Match Unicode characters back to Unicode name

    This does ``☃`` -> ``\\snowman``

    Note that snowman is not a valid python3 combining character but will be expanded.
    Though it will not recombine back to the snowman character by the completion machinery.

    This will not either back-complete standard sequences like \\n, \\b ...

    Returns
    -------
    Return a tuple with two elements:

    - The Unicode character that was matched (preceded with a backslash), or
      empty string,
    - a sequence (of 1), name for the match Unicode character, preceded by
      backslash, or empty if no match.
    """
    if len(text)<2:
        return '', ()
    maybe_slash = text[-2]
    if maybe_slash != '\\':
        return '', ()

    char = text[-1]
    # no expand on quote for completion in strings.
    # nor backcomplete standard ascii keys
    if char in string.ascii_letters or char in ('"',"'"):
        return '', ()
    try :
        unic = unicodedata.name(char)
        return '\\'+char,('\\'+unic,)
    except (KeyError, ValueError):
        # Fixed: unicodedata.name() raises ValueError for characters without
        # a name (e.g. most control characters); catching only KeyError let
        # that exception escape and crash completion.
        pass
    return '', ()
|
960 | 960 | |
|
def back_latex_name_matches(text:str) -> Tuple[str, Sequence[str]] :
    """Match latex characters back to unicode name

    This does ``\\ℵ`` -> ``\\aleph``

    """
    no_match = ('', ())
    # Need at least a backslash followed by one character.
    if len(text) < 2 or text[-2] != '\\':
        return no_match

    char = text[-1]
    # no expand on quote for completion in strings.
    # nor backcomplete standard ascii keys
    if char in string.ascii_letters or char in ('"', "'"):
        return no_match
    try :
        # '\\' + char: the returned fragment includes the backslash as well.
        return '\\' + char, [reverse_latex_symbol[char]]
    except KeyError:
        return no_match
|
986 | 986 | |
|
987 | 987 | |
|
988 | 988 | def _formatparamchildren(parameter) -> str: |
|
989 | 989 | """ |
|
990 | 990 | Get parameter name and value from Jedi Private API |
|
991 | 991 | |
|
992 | 992 | Jedi does not expose a simple way to get `param=value` from its API. |
|
993 | 993 | |
|
994 | 994 | Parameters |
|
995 | 995 | ---------- |
|
996 |
parameter |
|
|
996 | parameter | |
|
997 | 997 | Jedi's function `Param` |
|
998 | 998 | |
|
999 | 999 | Returns |
|
1000 | 1000 | ------- |
|
1001 | 1001 | A string like 'a', 'b=1', '*args', '**kwargs' |
|
1002 | 1002 | |
|
1003 | 1003 | """ |
|
1004 | 1004 | description = parameter.description |
|
1005 | 1005 | if not description.startswith('param '): |
|
1006 | 1006 | raise ValueError('Jedi function parameter description have change format.' |
|
1007 | 1007 | 'Expected "param ...", found %r".' % description) |
|
1008 | 1008 | return description[6:] |
|
1009 | 1009 | |
|
1010 | 1010 | def _make_signature(completion)-> str: |
|
1011 | 1011 | """ |
|
1012 | 1012 | Make the signature from a jedi completion |
|
1013 | 1013 | |
|
1014 | 1014 | Parameters |
|
1015 | 1015 | ---------- |
|
1016 | 1016 | completion: jedi.Completion |
|
1017 | 1017 | object does not complete a function type |
|
1018 | 1018 | |
|
1019 | 1019 | Returns |
|
1020 | 1020 | ------- |
|
1021 | 1021 | a string consisting of the function signature, with the parenthesis but |
|
1022 | 1022 | without the function name. example: |
|
1023 | 1023 | `(a, *args, b=1, **kwargs)` |
|
1024 | 1024 | |
|
1025 | 1025 | """ |
|
1026 | 1026 | |
|
1027 | 1027 | # it looks like this might work on jedi 0.17 |
|
1028 | 1028 | if hasattr(completion, 'get_signatures'): |
|
1029 | 1029 | signatures = completion.get_signatures() |
|
1030 | 1030 | if not signatures: |
|
1031 | 1031 | return '(?)' |
|
1032 | 1032 | |
|
1033 | 1033 | c0 = completion.get_signatures()[0] |
|
1034 | 1034 | return '('+c0.to_string().split('(', maxsplit=1)[1] |
|
1035 | 1035 | |
|
1036 | 1036 | return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for signature in completion.get_signatures() |
|
1037 | 1037 | for p in signature.defined_names()) if f]) |
|
1038 | 1038 | |
|
1039 | 1039 | |
|
class _CompleteResult(NamedTuple):
    # Internal bundle of completion results:
    #   matched_text    - the fragment of input the matches apply to
    #   matches         - the textual completion candidates
    #   matches_origin  - origin label for each candidate (parallel sequence)
    #   jedi_matches    - raw jedi completion objects (opaque here)
    matched_text : str
    matches: Sequence[str]
    matches_origin: Sequence[str]
    jedi_matches: Any
|
1046 | 1046 | |
|
1047 | 1047 | class IPCompleter(Completer): |
|
1048 | 1048 | """Extension of the completer class with IPython-specific features""" |
|
1049 | 1049 | |
|
1050 | 1050 | __dict_key_regexps: Optional[Dict[bool,Pattern]] = None |
|
1051 | 1051 | |
|
1052 | 1052 | @observe('greedy') |
|
1053 | 1053 | def _greedy_changed(self, change): |
|
1054 | 1054 | """update the splitter and readline delims when greedy is changed""" |
|
1055 | 1055 | if change['new']: |
|
1056 | 1056 | self.splitter.delims = GREEDY_DELIMS |
|
1057 | 1057 | else: |
|
1058 | 1058 | self.splitter.delims = DELIMS |
|
1059 | 1059 | |
|
    # When True, only dict-key completions are offered (see `matchers`).
    # NOTE(review): unlike the traits below, this one is not tagged
    # config=True — confirm whether that is intentional.
    dict_keys_only = Bool(False,
        help="""Whether to show dict key matches only""")

    # Whether results from all matchers are merged, or only the first
    # non-empty matcher wins.
    merge_completions = Bool(True,
        help="""Whether to merge completion results into a single list

        If False, only the completion results from the first non-empty
        completer will be returned.
        """
    ).tag(config=True)
    # Filtering level for underscore-prefixed names on attribute access.
    omit__names = Enum((0,1,2), default_value=2,
        help="""Instruct the completer to omit private method names

        Specifically, when completing on ``object.<tab>``.

        When 2 [default]: all names that start with '_' will be excluded.

        When 1: all 'magic' names (``__foo__``) will be excluded.

        When 0: nothing will be excluded.
        """
    ).tag(config=True)
    # Deprecated knob; setting it only triggers _limit_to_all_changed below.
    limit_to__all__ = Bool(False,
        help="""
        DEPRECATED as of version 5.0.

        Instruct the completer to use __all__ for the completion

        Specifically, when completing on ``object.<tab>``.

        When True: only those names in obj.__all__ will be included.

        When False [default]: the __all__ attribute is ignored
        """,
    ).tag(config=True)

    # Opt-in cProfile instrumentation of the completion machinery.
    profile_completions = Bool(
        default_value=False,
        help="If True, emit profiling data for completion subsystem using cProfile."
    ).tag(config=True)

    # Where profile dumps are written when profile_completions is enabled.
    profiler_output_dir = Unicode(
        default_value=".completion_profiles",
        help="Template for path at which to output profile data for completions."
    ).tag(config=True)
|
1105 | 1105 | |
|
1106 | 1106 | @observe('limit_to__all__') |
|
1107 | 1107 | def _limit_to_all_changed(self, change): |
|
1108 | 1108 | warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration ' |
|
1109 | 1109 | 'value has been deprecated since IPython 5.0, will be made to have ' |
|
1110 | 1110 | 'no effects and then removed in future version of IPython.', |
|
1111 | 1111 | UserWarning) |
|
1112 | 1112 | |
|
    def __init__(self, shell=None, namespace=None, global_namespace=None,
                 use_readline=_deprecation_readline_sentinel, config=None, **kwargs):
        """IPCompleter() -> completer

        Return a completer object.

        Parameters
        ----------
        shell
            a pointer to the ipython shell itself. This is needed
            because this completer knows about magic functions, and those can
            only be accessed via the ipython instance.
        namespace : dict, optional
            an optional dict where completions are performed.
        global_namespace : dict, optional
            secondary optional dict for completions, to
            handle cases (such as IPython embedded inside functions) where
            both Python scopes are visible.
        use_readline : bool, optional
            DEPRECATED, ignored since IPython 6.0, will have no effects
        """

        # escape char that introduces a magic name, e.g. '%'
        self.magic_escape = ESC_MAGIC
        self.splitter = CompletionSplitter()

        if use_readline is not _deprecation_readline_sentinel:
            warnings.warn('The `use_readline` parameter is deprecated and ignored since IPython 6.0.',
                          DeprecationWarning, stacklevel=2)

        # _greedy_changed() depends on splitter and readline being defined:
        Completer.__init__(self, namespace=namespace, global_namespace=global_namespace,
                           config=config, **kwargs)

        # List where completion matches will be stored
        self.matches = []
        self.shell = shell
        # Regexp to split filenames with spaces in them
        self.space_name_re = re.compile(r'([^\\] )')
        # Hold a local ref. to glob.glob for speed
        self.glob = glob.glob

        # Determine if we are running on 'dumb' terminals, like (X)Emacs
        # buffers, to avoid completion problems.
        term = os.environ.get('TERM','xterm')
        self.dumb_terminal = term in ['dumb','emacs']

        # Special handling of backslashes needed in win32 platforms
        if sys.platform == "win32":
            self.clean_glob = self._clean_glob_win32
        else:
            self.clean_glob = self._clean_glob

        # regexp to parse docstring for function signature
        self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
        self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
        # use this if positional argument name is also needed
        # = re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)')

        # matchers that complete the *arguments* of specific magics
        # (e.g. `%config Cls.<tab>`), not magic names themselves
        self.magic_arg_matchers = [
            self.magic_config_matches,
            self.magic_color_matches,
        ]

        # This is set externally by InteractiveShell
        self.custom_completers = None

        # This is a list of names of unicode characters that can be completed
        # into their corresponding unicode value. The list is large, so we
        # lazily initialize it on first use. Consuming code should access this
        # attribute through the `@unicode_names` property.
        self._unicode_names = None
|
1184 | 1184 | |
|
1185 | 1185 | @property |
|
1186 | 1186 | def matchers(self) -> List[Any]: |
|
1187 | 1187 | """All active matcher routines for completion""" |
|
1188 | 1188 | if self.dict_keys_only: |
|
1189 | 1189 | return [self.dict_key_matches] |
|
1190 | 1190 | |
|
1191 | 1191 | if self.use_jedi: |
|
1192 | 1192 | return [ |
|
1193 | 1193 | *self.custom_matchers, |
|
1194 | 1194 | self.file_matches, |
|
1195 | 1195 | self.magic_matches, |
|
1196 | 1196 | self.dict_key_matches, |
|
1197 | 1197 | ] |
|
1198 | 1198 | else: |
|
1199 | 1199 | return [ |
|
1200 | 1200 | *self.custom_matchers, |
|
1201 | 1201 | self.python_matches, |
|
1202 | 1202 | self.file_matches, |
|
1203 | 1203 | self.magic_matches, |
|
1204 | 1204 | self.python_func_kw_matches, |
|
1205 | 1205 | self.dict_key_matches, |
|
1206 | 1206 | ] |
|
1207 | 1207 | |
|
1208 | 1208 | def all_completions(self, text:str) -> List[str]: |
|
1209 | 1209 | """ |
|
1210 | 1210 | Wrapper around the completion methods for the benefit of emacs. |
|
1211 | 1211 | """ |
|
1212 | 1212 | prefix = text.rpartition('.')[0] |
|
1213 | 1213 | with provisionalcompleter(): |
|
1214 | 1214 | return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text |
|
1215 | 1215 | for c in self.completions(text, len(text))] |
|
1216 | 1216 | |
|
1217 | 1217 | return self.complete(text)[1] |
|
1218 | 1218 | |
|
1219 | 1219 | def _clean_glob(self, text:str): |
|
1220 | 1220 | return self.glob("%s*" % text) |
|
1221 | 1221 | |
|
1222 | 1222 | def _clean_glob_win32(self, text:str): |
|
1223 | 1223 | return [f.replace("\\","/") |
|
1224 | 1224 | for f in self.glob("%s*" % text)] |
|
1225 | 1225 | |
|
    def file_matches(self, text:str)->List[str]:
        """Match filenames, expanding ~USER type strings.

        Most of the seemingly convoluted logic in this completer is an
        attempt to handle filenames with spaces in them. And yet it's not
        quite perfect, because Python's readline doesn't expose all of the
        GNU readline details needed for this to be done correctly.

        For a filename with a space in it, the printed completions will be
        only the parts after what's already been typed (instead of the
        full completions, as is normally done). I don't think with the
        current (as of Python 2.3) Python readline it's possible to do
        better."""

        # chars that require escaping with backslash - i.e. chars
        # that readline treats incorrectly as delimiters, but we
        # don't want to treat as delimiters in filename matching
        # when escaped with backslash
        if text.startswith('!'):
            # shell escape: complete the path after '!' and re-attach the
            # '!' to every match via text_prefix
            text = text[1:]
            text_prefix = u'!'
        else:
            text_prefix = u''

        text_until_cursor = self.text_until_cursor
        # track strings with open quotes
        open_quotes = has_open_quotes(text_until_cursor)

        if '(' in text_until_cursor or '[' in text_until_cursor:
            # inside a call/index expression, shlex-style splitting would be
            # misleading; complete on the raw token instead
            lsplit = text
        else:
            try:
                # arg_split ~ shlex.split, but with unicode bugs fixed by us
                lsplit = arg_split(text_until_cursor)[-1]
            except ValueError:
                # typically an unmatched ", or backslash without escaped char.
                if open_quotes:
                    lsplit = text_until_cursor.split(open_quotes)[-1]
                else:
                    return []
            except IndexError:
                # tab pressed on empty line
                lsplit = ""

        if not open_quotes and lsplit != protect_filename(lsplit):
            # if protectables are found, do matching on the whole escaped name
            has_protectables = True
            text0,text = text,lsplit
        else:
            has_protectables = False
            text = os.path.expanduser(text)

        if text == "":
            # nothing typed yet: offer everything in the current directory
            return [text_prefix + protect_filename(f) for f in self.glob("*")]

        # Compute the matches from the filesystem
        if sys.platform == 'win32':
            m0 = self.clean_glob(text)
        else:
            # on POSIX, strip the escaping backslashes before globbing
            m0 = self.clean_glob(text.replace('\\', ''))

        if has_protectables:
            # If we had protectables, we need to revert our changes to the
            # beginning of filename so that we don't double-write the part
            # of the filename we have so far
            len_lsplit = len(lsplit)
            matches = [text_prefix + text0 +
                       protect_filename(f[len_lsplit:]) for f in m0]
        else:
            if open_quotes:
                # if we have a string with an open quote, we don't need to
                # protect the names beyond the quote (and we _shouldn't_, as
                # it would cause bugs when the filesystem call is made).
                matches = m0 if sys.platform == "win32" else\
                    [protect_filename(f, open_quotes) for f in m0]
            else:
                matches = [text_prefix +
                           protect_filename(f) for f in m0]

        # Mark directories in input list by appending '/' to their names.
        return [x+'/' if os.path.isdir(x) else x for x in matches]
|
1307 | 1307 | |
|
    def magic_matches(self, text:str):
        """Match magics"""
        # Get all shell magics now rather than statically, so magics loaded at
        # runtime show up too.
        lsm = self.shell.magics_manager.lsmagic()
        line_magics = lsm['line']
        cell_magics = lsm['cell']
        pre = self.magic_escape
        pre2 = pre+pre

        # did the user explicitly type the magic escape ('%')?
        explicit_magic = text.startswith(pre)

        # Completion logic:
        # - user gives %%: only do cell magics
        # - user gives %: do both line and cell magics
        # - no prefix: do both
        # In other words, line magics are skipped if the user gives %% explicitly
        #
        # We also exclude magics that match any currently visible names:
        # https://github.com/ipython/ipython/issues/4877, unless the user has
        # typed a %:
        # https://github.com/ipython/ipython/issues/10754
        # NOTE: lstrip(pre) removes *all* leading escape chars, so both '%x'
        # and '%%x' reduce to 'x' here.
        bare_text = text.lstrip(pre)
        global_matches = self.global_matches(bare_text)
        if not explicit_magic:
            def matches(magic):
                """
                Filter magics, in particular remove magics that match
                a name present in global namespace.
                """
                return ( magic.startswith(bare_text) and
                         magic not in global_matches )
        else:
            def matches(magic):
                return magic.startswith(bare_text)

        # cell magics are always offered, prefixed with '%%'
        comp = [ pre2+m for m in cell_magics if matches(m)]
        # line magics only when the user did not explicitly type '%%'
        if not text.startswith(pre2):
            comp += [ pre+m for m in line_magics if matches(m)]

        return comp
|
1349 | 1349 | |
|
    def magic_config_matches(self, text:str) -> List[str]:
        """ Match class names and attributes for %config magic """
        texts = text.strip().split()

        if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'):
            # get all configuration classes
            classes = sorted(set([ c for c in self.shell.configurables
                                   if c.__class__.class_traits(config=True)
                                   ]), key=lambda x: x.__class__.__name__)
            classnames = [ c.__class__.__name__ for c in classes ]

            # return all classnames if config or %config is given
            if len(texts) == 1:
                return classnames

            # match classname
            classname_texts = texts[1].split('.')
            classname = classname_texts[0]
            classname_matches = [ c for c in classnames
                                  if c.startswith(classname) ]

            # return matched classes or the matched class with attributes
            if texts[1].find('.') < 0:
                # no dot typed yet: still completing the class name itself
                return classname_matches
            elif len(classname_matches) == 1 and \
                            classname_matches[0] == classname:
                # exactly one fully-typed class: complete its config traits
                cls = classes[classnames.index(classname)].__class__
                help = cls.class_get_help()
                # strip leading '--' from cl-args:
                help = re.sub(re.compile(r'^--', re.MULTILINE), '', help)
                # each relevant help line looks like 'Class.trait=<...>';
                # keep the part before '=' for those matching the typed text
                return [ attr.split('=')[0]
                         for attr in help.strip().splitlines()
                         if attr.startswith(texts[1]) ]
        return []
|
1384 | 1384 | |
|
1385 | 1385 | def magic_color_matches(self, text:str) -> List[str] : |
|
1386 | 1386 | """ Match color schemes for %colors magic""" |
|
1387 | 1387 | texts = text.split() |
|
1388 | 1388 | if text.endswith(' '): |
|
1389 | 1389 | # .split() strips off the trailing whitespace. Add '' back |
|
1390 | 1390 | # so that: '%colors ' -> ['%colors', ''] |
|
1391 | 1391 | texts.append('') |
|
1392 | 1392 | |
|
1393 | 1393 | if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'): |
|
1394 | 1394 | prefix = texts[1] |
|
1395 | 1395 | return [ color for color in InspectColors.keys() |
|
1396 | 1396 | if color.startswith(prefix) ] |
|
1397 | 1397 | return [] |
|
1398 | 1398 | |
|
    def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str) -> Iterable[Any]:
        """
        Return a list of :any:`jedi.api.Completions` object from a ``text`` and
        cursor position.

        Parameters
        ----------
        cursor_column : int
            column position of the cursor in ``text``, 0-indexed.
        cursor_line : int
            line position of the cursor in ``text``, 0-indexed
        text : str
            text to complete

        Notes
        -----
        If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion`
        object containing a string with the Jedi debug information attached.
        """
        # local namespace first, then (optionally) the global one
        namespaces = [self.namespace]
        if self.global_namespace is not None:
            namespaces.append(self.global_namespace)

        completion_filter = lambda x:x
        offset = cursor_to_position(text, cursor_line, cursor_column)
        # filter output if we are completing for object members
        if offset:
            pre = text[offset-1]
            if pre == '.':
                # attribute access: apply the omit__names policy
                if self.omit__names == 2:
                    completion_filter = lambda c:not c.name.startswith('_')
                elif self.omit__names == 1:
                    completion_filter = lambda c:not (c.name.startswith('__') and c.name.endswith('__'))
                elif self.omit__names == 0:
                    completion_filter = lambda x:x
                else:
                    raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names))

        interpreter = jedi.Interpreter(text[:offset], namespaces)
        try_jedi = True

        try:
            # NOTE(review): this peeks at jedi's *private* parse tree
            # (interpreter._get_module()); it may break across jedi versions,
            # hence the broad exception handler below.
            # find the first token in the current tree -- if it is a ' or " then we are in a string
            completing_string = False
            try:
                first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value'))
            except StopIteration:
                pass
            else:
                # note the value may be ', ", or it may also be ''' or """, or
                # in some cases, """what/you/typed..., but all of these are
                # strings.
                completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'}

            # if we are in a string jedi is likely not the right candidate for
            # now. Skip it.
            try_jedi = not completing_string
        except Exception as e:
            # many of things can go wrong, we are using private API just don't crash.
            if self.debug:
                print("Error detecting if completing a non-finished string :", e, '|')

        if not try_jedi:
            return []
        try:
            # jedi lines are 1-indexed, hence cursor_line + 1
            return filter(completion_filter, interpreter.complete(column=cursor_column, line=cursor_line + 1))
        except Exception as e:
            if self.debug:
                return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))]
            else:
                return []
|
1470 | 1470 | |
|
1471 | 1471 | def python_matches(self, text:str)->List[str]: |
|
1472 | 1472 | """Match attributes or global python names""" |
|
1473 | 1473 | if "." in text: |
|
1474 | 1474 | try: |
|
1475 | 1475 | matches = self.attr_matches(text) |
|
1476 | 1476 | if text.endswith('.') and self.omit__names: |
|
1477 | 1477 | if self.omit__names == 1: |
|
1478 | 1478 | # true if txt is _not_ a __ name, false otherwise: |
|
1479 | 1479 | no__name = (lambda txt: |
|
1480 | 1480 | re.match(r'.*\.__.*?__',txt) is None) |
|
1481 | 1481 | else: |
|
1482 | 1482 | # true if txt is _not_ a _ name, false otherwise: |
|
1483 | 1483 | no__name = (lambda txt: |
|
1484 | 1484 | re.match(r'\._.*?',txt[txt.rindex('.'):]) is None) |
|
1485 | 1485 | matches = filter(no__name, matches) |
|
1486 | 1486 | except NameError: |
|
1487 | 1487 | # catches <undefined attributes>.<tab> |
|
1488 | 1488 | matches = [] |
|
1489 | 1489 | else: |
|
1490 | 1490 | matches = self.global_matches(text) |
|
1491 | 1491 | return matches |
|
1492 | 1492 | |
|
1493 | 1493 | def _default_arguments_from_docstring(self, doc): |
|
1494 | 1494 | """Parse the first line of docstring for call signature. |
|
1495 | 1495 | |
|
1496 | 1496 | Docstring should be of the form 'min(iterable[, key=func])\n'. |
|
1497 | 1497 | It can also parse cython docstring of the form |
|
1498 | 1498 | 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. |
|
1499 | 1499 | """ |
|
1500 | 1500 | if doc is None: |
|
1501 | 1501 | return [] |
|
1502 | 1502 | |
|
1503 | 1503 | #care only the firstline |
|
1504 | 1504 | line = doc.lstrip().splitlines()[0] |
|
1505 | 1505 | |
|
1506 | 1506 | #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') |
|
1507 | 1507 | #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' |
|
1508 | 1508 | sig = self.docstring_sig_re.search(line) |
|
1509 | 1509 | if sig is None: |
|
1510 | 1510 | return [] |
|
1511 | 1511 | # iterable[, key=func]' -> ['iterable[' ,' key=func]'] |
|
1512 | 1512 | sig = sig.groups()[0].split(',') |
|
1513 | 1513 | ret = [] |
|
1514 | 1514 | for s in sig: |
|
1515 | 1515 | #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') |
|
1516 | 1516 | ret += self.docstring_kwd_re.findall(s) |
|
1517 | 1517 | return ret |
|
1518 | 1518 | |
|
    def _default_arguments(self, obj):
        """Return the list of default arguments of obj if it is callable,
        or empty list otherwise."""
        call_obj = obj
        ret = []
        if inspect.isbuiltin(obj):
            # builtins: no docstring-signature parsing attempted here;
            # inspect.signature below may still succeed for some of them
            pass
        elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
            if inspect.isclass(obj):
                #for cython embedsignature=True the constructor docstring
                #belongs to the object itself not __init__
                ret += self._default_arguments_from_docstring(
                        getattr(obj, '__doc__', ''))
                # for classes, check for __init__,__new__
                call_obj = (getattr(obj, '__init__', None) or
                       getattr(obj, '__new__', None))
            # for all others, check if they are __call__able
            elif hasattr(obj, '__call__'):
                call_obj = obj.__call__
            ret += self._default_arguments_from_docstring(
                     getattr(call_obj, '__doc__', ''))

        # only parameters a caller could pass by keyword are interesting
        _keeps = (inspect.Parameter.KEYWORD_ONLY,
                  inspect.Parameter.POSITIONAL_OR_KEYWORD)

        try:
            sig = inspect.signature(call_obj)
            ret.extend(k for k, v in sig.parameters.items() if
                       v.kind in _keeps)
        except ValueError:
            # no retrievable signature (e.g. some builtins/C extensions)
            # NOTE(review): inspect.signature can also raise TypeError for
            # non-callables, which is not caught here — confirm callers
            # always pass callables.
            pass

        # de-duplicate; note that ordering is not preserved
        return list(set(ret))
|
1552 | 1552 | |
|
    def python_func_kw_matches(self, text):
        """Match named parameters (kwargs) of the last open function"""

        if "." in text: # a parameter cannot be dotted
            return []
        # lazily build and cache the tokenizing regexp on first use
        try: regexp = self.__funcParamsRegex
        except AttributeError:
            regexp = self.__funcParamsRegex = re.compile(r'''
                '.*?(?<!\\)' |    # single quoted strings or
                ".*?(?<!\\)" |    # double quoted strings or
                \w+          |    # identifier
                \S                # other characters
                ''', re.VERBOSE | re.DOTALL)
        # 1. find the nearest identifier that comes before an unclosed
        # parenthesis before the cursor
        # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
        tokens = regexp.findall(self.text_until_cursor)
        iterTokens = reversed(tokens); openPar = 0

        # walk backwards counting parens until we find the unclosed '('
        for token in iterTokens:
            if token == ')':
                openPar -= 1
            elif token == '(':
                openPar += 1
                if openPar > 0:
                    # found the last unclosed parenthesis
                    break
        else:
            return []
        # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
        # continue consuming iterTokens backwards from the '(' found above
        ids = []
        isId = re.compile(r'\w+$').match

        while True:
            try:
                ids.append(next(iterTokens))
                if not isId(ids[-1]):
                    ids.pop(); break
                if not next(iterTokens) == '.':
                    break
            except StopIteration:
                break

        # Find all named arguments already assigned to, as to avoid suggesting
        # them again
        usedNamedArgs = set()
        par_level = -1
        for token, next_token in zip(tokens, tokens[1:]):
            if token == '(':
                par_level += 1
            elif token == ')':
                par_level -= 1

            # only look at the top level of the currently-open call
            if par_level != 0:
                continue

            if next_token != '=':
                continue

            usedNamedArgs.add(token)

        argMatches = []
        try:
            # re-assemble the dotted callable name (ids was built backwards)
            callableObj = '.'.join(ids[::-1])
            namedArgs = self._default_arguments(eval(callableObj,
                                                    self.namespace))

            # Remove used named arguments from the list, no need to show twice
            for namedArg in set(namedArgs) - usedNamedArgs:
                if namedArg.startswith(text):
                    argMatches.append("%s=" %namedArg)
        except:
            # NOTE(review): bare except is deliberate best-effort here —
            # eval() of user-typed text can raise nearly anything.
            pass

        return argMatches
|
1628 | 1628 | |
|
1629 | 1629 | @staticmethod |
|
1630 | 1630 | def _get_keys(obj: Any) -> List[Any]: |
|
1631 | 1631 | # Objects can define their own completions by defining an |
|
1632 | 1632 | # _ipy_key_completions_() method. |
|
1633 | 1633 | method = get_real_method(obj, '_ipython_key_completions_') |
|
1634 | 1634 | if method is not None: |
|
1635 | 1635 | return method() |
|
1636 | 1636 | |
|
1637 | 1637 | # Special case some common in-memory dict-like types |
|
1638 | 1638 | if isinstance(obj, dict) or\ |
|
1639 | 1639 | _safe_isinstance(obj, 'pandas', 'DataFrame'): |
|
1640 | 1640 | try: |
|
1641 | 1641 | return list(obj.keys()) |
|
1642 | 1642 | except Exception: |
|
1643 | 1643 | return [] |
|
1644 | 1644 | elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ |
|
1645 | 1645 | _safe_isinstance(obj, 'numpy', 'void'): |
|
1646 | 1646 | return obj.dtype.names or [] |
|
1647 | 1647 | return [] |
|
1648 | 1648 | |
|
    def dict_key_matches(self, text:str) -> List[str]:
        "Match string keys in a dictionary, after e.g. 'foo[' "

        # Build (once, lazily) the two regexps that recognise a
        # dict-indexing expression ending at the cursor, keyed by the
        # `greedy` setting.
        if self.__dict_key_regexps is not None:
            regexps = self.__dict_key_regexps
        else:
            dict_key_re_fmt = r'''(?x)
            (  # match dict-referring expression wrt greedy setting
                %s
            )
            \[   # open bracket
            \s*  # and optional whitespace
            # Capture any number of str-like objects (e.g. "a", "b", 'c')
            ((?:[uUbB]?  # string prefix (r not handled)
                (?:
                    '(?:[^']|(?<!\\)\\')*'
                |
                    "(?:[^"]|(?<!\\)\\")*"
                )
                \s*,\s*
            )*)
            ([uUbB]?  # string prefix (r not handled)
                (?:  # unclosed string
                    '(?:[^']|(?<!\\)\\')*
                |
                    "(?:[^"]|(?<!\\)\\")*
                )
            )?
            $
            '''
            regexps = self.__dict_key_regexps = {
                False: re.compile(dict_key_re_fmt % r'''
                                  # identifiers separated by .
                                  (?!\d)\w+
                                  (?:\.(?!\d)\w+)*
                                  '''),
                True: re.compile(dict_key_re_fmt % '''
                                  .+
                                  ''')
            }

        match = regexps[self.greedy].search(self.text_until_cursor)

        if match is None:
            return []

        # group 1: the dict-like expression; group 2: closed string keys
        # already typed; group 3: the (possibly unclosed) key being typed
        expr, prefix0, prefix = match.groups()
        try:
            obj = eval(expr, self.namespace)
        except Exception:
            try:
                obj = eval(expr, self.global_namespace)
            except Exception:
                return []

        keys = self._get_keys(obj)
        if not keys:
            return keys

        # safe to eval: the regex above only admits string literals here
        extra_prefix = eval(prefix0) if prefix0 != '' else None

        closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims, extra_prefix=extra_prefix)
        if not matches:
            return matches

        # get the cursor position of
        # - the text being completed
        # - the start of the key text
        # - the start of the completion
        text_start = len(self.text_until_cursor) - len(text)
        if prefix:
            key_start = match.start(3)
            completion_start = key_start + token_offset
        else:
            key_start = completion_start = match.end()

        # grab the leading prefix, to make sure all completions start with `text`
        if text_start > key_start:
            leading = ''
        else:
            leading = text[text_start:completion_start]

        # the index of the `[` character
        bracket_idx = match.end(1)

        # append closing quote and bracket as appropriate
        # this is *not* appropriate if the opening quote or bracket is outside
        # the text given to this method
        suf = ''
        continuation = self.line_buffer[len(self.text_until_cursor):]
        if key_start > text_start and closing_quote:
            # quotes were opened inside text, maybe close them
            if continuation.startswith(closing_quote):
                # the closing quote is already typed after the cursor
                continuation = continuation[len(closing_quote):]
            else:
                suf += closing_quote
        if bracket_idx > text_start:
            # brackets were opened inside text, maybe close them
            if not continuation.startswith(']'):
                suf += ']'

        return [leading + k + suf for k in matches]
|
1752 | 1752 | |
|
1753 | 1753 | @staticmethod |
|
1754 | 1754 | def unicode_name_matches(text:str) -> Tuple[str, List[str]] : |
|
1755 | 1755 | """Match Latex-like syntax for unicode characters base |
|
1756 | 1756 | on the name of the character. |
|
1757 | 1757 | |
|
1758 | 1758 | This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` |
|
1759 | 1759 | |
|
1760 | 1760 | Works only on valid python 3 identifier, or on combining characters that |
|
1761 | 1761 | will combine to form a valid identifier. |
|
1762 | 1762 | """ |
|
1763 | 1763 | slashpos = text.rfind('\\') |
|
1764 | 1764 | if slashpos > -1: |
|
1765 | 1765 | s = text[slashpos+1:] |
|
1766 | 1766 | try : |
|
1767 | 1767 | unic = unicodedata.lookup(s) |
|
1768 | 1768 | # allow combining chars |
|
1769 | 1769 | if ('a'+unic).isidentifier(): |
|
1770 | 1770 | return '\\'+s,[unic] |
|
1771 | 1771 | except KeyError: |
|
1772 | 1772 | pass |
|
1773 | 1773 | return '', [] |
|
1774 | 1774 | |
|
1775 | 1775 | |
|
1776 | 1776 | def latex_matches(self, text:str) -> Tuple[str, Sequence[str]]: |
|
1777 | 1777 | """Match Latex syntax for unicode characters. |
|
1778 | 1778 | |
|
1779 | 1779 | This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` |
|
1780 | 1780 | """ |
|
1781 | 1781 | slashpos = text.rfind('\\') |
|
1782 | 1782 | if slashpos > -1: |
|
1783 | 1783 | s = text[slashpos:] |
|
1784 | 1784 | if s in latex_symbols: |
|
1785 | 1785 | # Try to complete a full latex symbol to unicode |
|
1786 | 1786 | # \\alpha -> α |
|
1787 | 1787 | return s, [latex_symbols[s]] |
|
1788 | 1788 | else: |
|
1789 | 1789 | # If a user has partially typed a latex symbol, give them |
|
1790 | 1790 | # a full list of options \al -> [\aleph, \alpha] |
|
1791 | 1791 | matches = [k for k in latex_symbols if k.startswith(s)] |
|
1792 | 1792 | if matches: |
|
1793 | 1793 | return s, matches |
|
1794 | 1794 | return '', () |
|
1795 | 1795 | |
|
1796 | 1796 | def dispatch_custom_completer(self, text): |
|
1797 | 1797 | if not self.custom_completers: |
|
1798 | 1798 | return |
|
1799 | 1799 | |
|
1800 | 1800 | line = self.line_buffer |
|
1801 | 1801 | if not line.strip(): |
|
1802 | 1802 | return None |
|
1803 | 1803 | |
|
1804 | 1804 | # Create a little structure to pass all the relevant information about |
|
1805 | 1805 | # the current completion to any custom completer. |
|
1806 | 1806 | event = SimpleNamespace() |
|
1807 | 1807 | event.line = line |
|
1808 | 1808 | event.symbol = text |
|
1809 | 1809 | cmd = line.split(None,1)[0] |
|
1810 | 1810 | event.command = cmd |
|
1811 | 1811 | event.text_until_cursor = self.text_until_cursor |
|
1812 | 1812 | |
|
1813 | 1813 | # for foo etc, try also to find completer for %foo |
|
1814 | 1814 | if not cmd.startswith(self.magic_escape): |
|
1815 | 1815 | try_magic = self.custom_completers.s_matches( |
|
1816 | 1816 | self.magic_escape + cmd) |
|
1817 | 1817 | else: |
|
1818 | 1818 | try_magic = [] |
|
1819 | 1819 | |
|
1820 | 1820 | for c in itertools.chain(self.custom_completers.s_matches(cmd), |
|
1821 | 1821 | try_magic, |
|
1822 | 1822 | self.custom_completers.flat_matches(self.text_until_cursor)): |
|
1823 | 1823 | try: |
|
1824 | 1824 | res = c(event) |
|
1825 | 1825 | if res: |
|
1826 | 1826 | # first, try case sensitive match |
|
1827 | 1827 | withcase = [r for r in res if r.startswith(text)] |
|
1828 | 1828 | if withcase: |
|
1829 | 1829 | return withcase |
|
1830 | 1830 | # if none, then case insensitive ones are ok too |
|
1831 | 1831 | text_low = text.lower() |
|
1832 | 1832 | return [r for r in res if r.lower().startswith(text_low)] |
|
1833 | 1833 | except TryNext: |
|
1834 | 1834 | pass |
|
1835 | 1835 | except KeyboardInterrupt: |
|
1836 | 1836 | """ |
|
1837 | 1837 | If custom completer take too long, |
|
1838 | 1838 | let keyboard interrupt abort and return nothing. |
|
1839 | 1839 | """ |
|
1840 | 1840 | break |
|
1841 | 1841 | |
|
1842 | 1842 | return None |
|
1843 | 1843 | |
|
1844 | 1844 | def completions(self, text: str, offset: int)->Iterator[Completion]: |
|
1845 | 1845 | """ |
|
1846 | 1846 | Returns an iterator over the possible completions |
|
1847 | 1847 | |
|
1848 | 1848 | .. warning:: |
|
1849 | 1849 | |
|
1850 | 1850 | Unstable |
|
1851 | 1851 | |
|
1852 | 1852 | This function is unstable, API may change without warning. |
|
1853 | 1853 | It will also raise unless use in proper context manager. |
|
1854 | 1854 | |
|
1855 | 1855 | Parameters |
|
1856 | 1856 | ---------- |
|
1857 | 1857 | text:str |
|
1858 | 1858 | Full text of the current input, multi line string. |
|
1859 | 1859 | offset:int |
|
1860 | 1860 | Integer representing the position of the cursor in ``text``. Offset |
|
1861 | 1861 | is 0-based indexed. |
|
1862 | 1862 | |
|
1863 | 1863 | Yields |
|
1864 | 1864 | ------ |
|
1865 | 1865 | Completion |
|
1866 | 1866 | |
|
1867 | 1867 | Notes |
|
1868 | 1868 | ----- |
|
1869 | 1869 | The cursor on a text can either be seen as being "in between" |
|
1870 | 1870 | characters or "On" a character depending on the interface visible to |
|
1871 | 1871 | the user. For consistency the cursor being on "in between" characters X |
|
1872 | 1872 | and Y is equivalent to the cursor being "on" character Y, that is to say |
|
1873 | 1873 | the character the cursor is on is considered as being after the cursor. |
|
1874 | 1874 | |
|
1875 | 1875 | Combining characters may span more that one position in the |
|
1876 | 1876 | text. |
|
1877 | 1877 | |
|
1878 | 1878 | .. note:: |
|
1879 | 1879 | |
|
1880 | 1880 | If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--`` |
|
1881 | 1881 | fake Completion token to distinguish completion returned by Jedi |
|
1882 | 1882 | and usual IPython completion. |
|
1883 | 1883 | |
|
1884 | 1884 | .. note:: |
|
1885 | 1885 | |
|
1886 | 1886 | Completions are not completely deduplicated yet. If identical |
|
1887 | 1887 | completions are coming from different sources this function does not |
|
1888 | 1888 | ensure that each completion object will only be present once. |
|
1889 | 1889 | """ |
|
1890 | 1890 | warnings.warn("_complete is a provisional API (as of IPython 6.0). " |
|
1891 | 1891 | "It may change without warnings. " |
|
1892 | 1892 | "Use in corresponding context manager.", |
|
1893 | 1893 | category=ProvisionalCompleterWarning, stacklevel=2) |
|
1894 | 1894 | |
|
1895 | 1895 | seen = set() |
|
1896 | 1896 | profiler:Optional[cProfile.Profile] |
|
1897 | 1897 | try: |
|
1898 | 1898 | if self.profile_completions: |
|
1899 | 1899 | import cProfile |
|
1900 | 1900 | profiler = cProfile.Profile() |
|
1901 | 1901 | profiler.enable() |
|
1902 | 1902 | else: |
|
1903 | 1903 | profiler = None |
|
1904 | 1904 | |
|
1905 | 1905 | for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000): |
|
1906 | 1906 | if c and (c in seen): |
|
1907 | 1907 | continue |
|
1908 | 1908 | yield c |
|
1909 | 1909 | seen.add(c) |
|
1910 | 1910 | except KeyboardInterrupt: |
|
1911 | 1911 | """if completions take too long and users send keyboard interrupt, |
|
1912 | 1912 | do not crash and return ASAP. """ |
|
1913 | 1913 | pass |
|
1914 | 1914 | finally: |
|
1915 | 1915 | if profiler is not None: |
|
1916 | 1916 | profiler.disable() |
|
1917 | 1917 | ensure_dir_exists(self.profiler_output_dir) |
|
1918 | 1918 | output_path = os.path.join(self.profiler_output_dir, str(uuid.uuid4())) |
|
1919 | 1919 | print("Writing profiler output to", output_path) |
|
1920 | 1920 | profiler.dump_stats(output_path) |
|
1921 | 1921 | |
|
1922 | 1922 | def _completions(self, full_text: str, offset: int, *, _timeout) -> Iterator[Completion]: |
|
1923 | 1923 | """ |
|
1924 | 1924 | Core completion module.Same signature as :any:`completions`, with the |
|
1925 | 1925 | extra `timeout` parameter (in seconds). |
|
1926 | 1926 | |
|
1927 | 1927 | Computing jedi's completion ``.type`` can be quite expensive (it is a |
|
1928 | 1928 | lazy property) and can require some warm-up, more warm up than just |
|
1929 | 1929 | computing the ``name`` of a completion. The warm-up can be : |
|
1930 | 1930 | |
|
1931 | 1931 | - Long warm-up the first time a module is encountered after |
|
1932 | 1932 | install/update: actually build parse/inference tree. |
|
1933 | 1933 | |
|
1934 | 1934 | - first time the module is encountered in a session: load tree from |
|
1935 | 1935 | disk. |
|
1936 | 1936 | |
|
1937 | 1937 | We don't want to block completions for tens of seconds so we give the |
|
1938 | 1938 | completer a "budget" of ``_timeout`` seconds per invocation to compute |
|
1939 | 1939 | completions types, the completions that have not yet been computed will |
|
1940 | 1940 | be marked as "unknown" an will have a chance to be computed next round |
|
1941 | 1941 | are things get cached. |
|
1942 | 1942 | |
|
1943 | 1943 | Keep in mind that Jedi is not the only thing treating the completion so |
|
1944 | 1944 | keep the timeout short-ish as if we take more than 0.3 second we still |
|
1945 | 1945 | have lots of processing to do. |
|
1946 | 1946 | |
|
1947 | 1947 | """ |
|
1948 | 1948 | deadline = time.monotonic() + _timeout |
|
1949 | 1949 | |
|
1950 | 1950 | |
|
1951 | 1951 | before = full_text[:offset] |
|
1952 | 1952 | cursor_line, cursor_column = position_to_cursor(full_text, offset) |
|
1953 | 1953 | |
|
1954 | 1954 | matched_text, matches, matches_origin, jedi_matches = self._complete( |
|
1955 | 1955 | full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column) |
|
1956 | 1956 | |
|
1957 | 1957 | iter_jm = iter(jedi_matches) |
|
1958 | 1958 | if _timeout: |
|
1959 | 1959 | for jm in iter_jm: |
|
1960 | 1960 | try: |
|
1961 | 1961 | type_ = jm.type |
|
1962 | 1962 | except Exception: |
|
1963 | 1963 | if self.debug: |
|
1964 | 1964 | print("Error in Jedi getting type of ", jm) |
|
1965 | 1965 | type_ = None |
|
1966 | 1966 | delta = len(jm.name_with_symbols) - len(jm.complete) |
|
1967 | 1967 | if type_ == 'function': |
|
1968 | 1968 | signature = _make_signature(jm) |
|
1969 | 1969 | else: |
|
1970 | 1970 | signature = '' |
|
1971 | 1971 | yield Completion(start=offset - delta, |
|
1972 | 1972 | end=offset, |
|
1973 | 1973 | text=jm.name_with_symbols, |
|
1974 | 1974 | type=type_, |
|
1975 | 1975 | signature=signature, |
|
1976 | 1976 | _origin='jedi') |
|
1977 | 1977 | |
|
1978 | 1978 | if time.monotonic() > deadline: |
|
1979 | 1979 | break |
|
1980 | 1980 | |
|
1981 | 1981 | for jm in iter_jm: |
|
1982 | 1982 | delta = len(jm.name_with_symbols) - len(jm.complete) |
|
1983 | 1983 | yield Completion(start=offset - delta, |
|
1984 | 1984 | end=offset, |
|
1985 | 1985 | text=jm.name_with_symbols, |
|
1986 | 1986 | type='<unknown>', # don't compute type for speed |
|
1987 | 1987 | _origin='jedi', |
|
1988 | 1988 | signature='') |
|
1989 | 1989 | |
|
1990 | 1990 | |
|
1991 | 1991 | start_offset = before.rfind(matched_text) |
|
1992 | 1992 | |
|
1993 | 1993 | # TODO: |
|
1994 | 1994 | # Suppress this, right now just for debug. |
|
1995 | 1995 | if jedi_matches and matches and self.debug: |
|
1996 | 1996 | yield Completion(start=start_offset, end=offset, text='--jedi/ipython--', |
|
1997 | 1997 | _origin='debug', type='none', signature='') |
|
1998 | 1998 | |
|
1999 | 1999 | # I'm unsure if this is always true, so let's assert and see if it |
|
2000 | 2000 | # crash |
|
2001 | 2001 | assert before.endswith(matched_text) |
|
2002 | 2002 | for m, t in zip(matches, matches_origin): |
|
2003 | 2003 | yield Completion(start=start_offset, end=offset, text=m, _origin=t, signature='', type='<unknown>') |
|
2004 | 2004 | |
|
2005 | 2005 | |
|
2006 | 2006 | def complete(self, text=None, line_buffer=None, cursor_pos=None) -> Tuple[str, Sequence[str]]: |
|
2007 | 2007 | """Find completions for the given text and line context. |
|
2008 | 2008 | |
|
2009 | 2009 | Note that both the text and the line_buffer are optional, but at least |
|
2010 | 2010 | one of them must be given. |
|
2011 | 2011 | |
|
2012 | 2012 | Parameters |
|
2013 | 2013 | ---------- |
|
2014 | 2014 | text : string, optional |
|
2015 | 2015 | Text to perform the completion on. If not given, the line buffer |
|
2016 | 2016 | is split using the instance's CompletionSplitter object. |
|
2017 | 2017 | line_buffer : string, optional |
|
2018 | 2018 | If not given, the completer attempts to obtain the current line |
|
2019 | 2019 | buffer via readline. This keyword allows clients which are |
|
2020 | 2020 | requesting for text completions in non-readline contexts to inform |
|
2021 | 2021 | the completer of the entire text. |
|
2022 | 2022 | cursor_pos : int, optional |
|
2023 | 2023 | Index of the cursor in the full line buffer. Should be provided by |
|
2024 | 2024 | remote frontends where kernel has no access to frontend state. |
|
2025 | 2025 | |
|
2026 | 2026 | Returns |
|
2027 | 2027 | ------- |
|
2028 | 2028 | Tuple of two items: |
|
2029 | 2029 | text : str |
|
2030 | 2030 | Text that was actually used in the completion. |
|
2031 | 2031 | matches : list |
|
2032 | 2032 | A list of completion matches. |
|
2033 | 2033 | |
|
2034 | 2034 | Notes |
|
2035 | 2035 | ----- |
|
2036 | 2036 | This API is likely to be deprecated and replaced by |
|
2037 | 2037 | :any:`IPCompleter.completions` in the future. |
|
2038 | 2038 | |
|
2039 | 2039 | """ |
|
2040 | 2040 | warnings.warn('`Completer.complete` is pending deprecation since ' |
|
2041 | 2041 | 'IPython 6.0 and will be replaced by `Completer.completions`.', |
|
2042 | 2042 | PendingDeprecationWarning) |
|
2043 | 2043 | # potential todo, FOLD the 3rd throw away argument of _complete |
|
2044 | 2044 | # into the first 2 one. |
|
2045 | 2045 | return self._complete(line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0)[:2] |
|
2046 | 2046 | |
|
2047 | 2047 | def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, |
|
2048 | 2048 | full_text=None) -> _CompleteResult: |
|
2049 | 2049 | """ |
|
2050 | 2050 | Like complete but can also returns raw jedi completions as well as the |
|
2051 | 2051 | origin of the completion text. This could (and should) be made much |
|
2052 | 2052 | cleaner but that will be simpler once we drop the old (and stateful) |
|
2053 | 2053 | :any:`complete` API. |
|
2054 | 2054 | |
|
2055 | 2055 | With current provisional API, cursor_pos act both (depending on the |
|
2056 | 2056 | caller) as the offset in the ``text`` or ``line_buffer``, or as the |
|
2057 | 2057 | ``column`` when passing multiline strings this could/should be renamed |
|
2058 | 2058 | but would add extra noise. |
|
2059 | 2059 | |
|
2060 | 2060 | Returns |
|
2061 | 2061 | ------- |
|
2062 | 2062 | A tuple of N elements which are (likely): |
|
2063 | 2063 | matched_text: ? the text that the complete matched |
|
2064 | 2064 | matches: list of completions ? |
|
2065 | 2065 | matches_origin: ? list same lenght as matches, and where each completion came from |
|
2066 | 2066 | jedi_matches: list of Jedi matches, have it's own structure. |
|
2067 | 2067 | """ |
|
2068 | 2068 | |
|
2069 | 2069 | |
|
2070 | 2070 | # if the cursor position isn't given, the only sane assumption we can |
|
2071 | 2071 | # make is that it's at the end of the line (the common case) |
|
2072 | 2072 | if cursor_pos is None: |
|
2073 | 2073 | cursor_pos = len(line_buffer) if text is None else len(text) |
|
2074 | 2074 | |
|
2075 | 2075 | if self.use_main_ns: |
|
2076 | 2076 | self.namespace = __main__.__dict__ |
|
2077 | 2077 | |
|
2078 | 2078 | # if text is either None or an empty string, rely on the line buffer |
|
2079 | 2079 | if (not line_buffer) and full_text: |
|
2080 | 2080 | line_buffer = full_text.split('\n')[cursor_line] |
|
2081 | 2081 | if not text: # issue #11508: check line_buffer before calling split_line |
|
2082 | 2082 | text = self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else '' |
|
2083 | 2083 | |
|
2084 | 2084 | if self.backslash_combining_completions: |
|
2085 | 2085 | # allow deactivation of these on windows. |
|
2086 | 2086 | base_text = text if not line_buffer else line_buffer[:cursor_pos] |
|
2087 | 2087 | |
|
2088 | 2088 | for meth in (self.latex_matches, |
|
2089 | 2089 | self.unicode_name_matches, |
|
2090 | 2090 | back_latex_name_matches, |
|
2091 | 2091 | back_unicode_name_matches, |
|
2092 | 2092 | self.fwd_unicode_match): |
|
2093 | 2093 | name_text, name_matches = meth(base_text) |
|
2094 | 2094 | if name_text: |
|
2095 | 2095 | return _CompleteResult(name_text, name_matches[:MATCHES_LIMIT], \ |
|
2096 | 2096 | [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), ()) |
|
2097 | 2097 | |
|
2098 | 2098 | |
|
2099 | 2099 | # If no line buffer is given, assume the input text is all there was |
|
2100 | 2100 | if line_buffer is None: |
|
2101 | 2101 | line_buffer = text |
|
2102 | 2102 | |
|
2103 | 2103 | self.line_buffer = line_buffer |
|
2104 | 2104 | self.text_until_cursor = self.line_buffer[:cursor_pos] |
|
2105 | 2105 | |
|
2106 | 2106 | # Do magic arg matches |
|
2107 | 2107 | for matcher in self.magic_arg_matchers: |
|
2108 | 2108 | matches = list(matcher(line_buffer))[:MATCHES_LIMIT] |
|
2109 | 2109 | if matches: |
|
2110 | 2110 | origins = [matcher.__qualname__] * len(matches) |
|
2111 | 2111 | return _CompleteResult(text, matches, origins, ()) |
|
2112 | 2112 | |
|
2113 | 2113 | # Start with a clean slate of completions |
|
2114 | 2114 | matches = [] |
|
2115 | 2115 | |
|
2116 | 2116 | # FIXME: we should extend our api to return a dict with completions for |
|
2117 | 2117 | # different types of objects. The rlcomplete() method could then |
|
2118 | 2118 | # simply collapse the dict into a list for readline, but we'd have |
|
2119 | 2119 | # richer completion semantics in other environments. |
|
2120 | 2120 | completions:Iterable[Any] = [] |
|
2121 | 2121 | if self.use_jedi: |
|
2122 | 2122 | if not full_text: |
|
2123 | 2123 | full_text = line_buffer |
|
2124 | 2124 | completions = self._jedi_matches( |
|
2125 | 2125 | cursor_pos, cursor_line, full_text) |
|
2126 | 2126 | |
|
2127 | 2127 | if self.merge_completions: |
|
2128 | 2128 | matches = [] |
|
2129 | 2129 | for matcher in self.matchers: |
|
2130 | 2130 | try: |
|
2131 | 2131 | matches.extend([(m, matcher.__qualname__) |
|
2132 | 2132 | for m in matcher(text)]) |
|
2133 | 2133 | except: |
|
2134 | 2134 | # Show the ugly traceback if the matcher causes an |
|
2135 | 2135 | # exception, but do NOT crash the kernel! |
|
2136 | 2136 | sys.excepthook(*sys.exc_info()) |
|
2137 | 2137 | else: |
|
2138 | 2138 | for matcher in self.matchers: |
|
2139 | 2139 | matches = [(m, matcher.__qualname__) |
|
2140 | 2140 | for m in matcher(text)] |
|
2141 | 2141 | if matches: |
|
2142 | 2142 | break |
|
2143 | 2143 | |
|
2144 | 2144 | seen = set() |
|
2145 | 2145 | filtered_matches = set() |
|
2146 | 2146 | for m in matches: |
|
2147 | 2147 | t, c = m |
|
2148 | 2148 | if t not in seen: |
|
2149 | 2149 | filtered_matches.add(m) |
|
2150 | 2150 | seen.add(t) |
|
2151 | 2151 | |
|
2152 | 2152 | _filtered_matches = sorted(filtered_matches, key=lambda x: completions_sorting_key(x[0])) |
|
2153 | 2153 | |
|
2154 | 2154 | custom_res = [(m, 'custom') for m in self.dispatch_custom_completer(text) or []] |
|
2155 | 2155 | |
|
2156 | 2156 | _filtered_matches = custom_res or _filtered_matches |
|
2157 | 2157 | |
|
2158 | 2158 | _filtered_matches = _filtered_matches[:MATCHES_LIMIT] |
|
2159 | 2159 | _matches = [m[0] for m in _filtered_matches] |
|
2160 | 2160 | origins = [m[1] for m in _filtered_matches] |
|
2161 | 2161 | |
|
2162 | 2162 | self.matches = _matches |
|
2163 | 2163 | |
|
2164 | 2164 | return _CompleteResult(text, _matches, origins, completions) |
|
2165 | 2165 | |
|
2166 | 2166 | def fwd_unicode_match(self, text:str) -> Tuple[str, Sequence[str]]: |
|
2167 | 2167 | """ |
|
2168 | 2168 | Forward match a string starting with a backslash with a list of |
|
2169 | 2169 | potential Unicode completions. |
|
2170 | 2170 | |
|
2171 | 2171 | Will compute list list of Unicode character names on first call and cache it. |
|
2172 | 2172 | |
|
2173 | 2173 | Returns |
|
2174 | 2174 | ------- |
|
2175 | 2175 | At tuple with: |
|
2176 | 2176 | - matched text (empty if no matches) |
|
2177 | 2177 | - list of potential completions, empty tuple otherwise) |
|
2178 | 2178 | """ |
|
2179 | 2179 | # TODO: self.unicode_names is here a list we traverse each time with ~100k elements. |
|
2180 | 2180 | # We could do a faster match using a Trie. |
|
2181 | 2181 | |
|
2182 | 2182 | # Using pygtrie the follwing seem to work: |
|
2183 | 2183 | |
|
2184 | 2184 | # s = PrefixSet() |
|
2185 | 2185 | |
|
2186 | 2186 | # for c in range(0,0x10FFFF + 1): |
|
2187 | 2187 | # try: |
|
2188 | 2188 | # s.add(unicodedata.name(chr(c))) |
|
2189 | 2189 | # except ValueError: |
|
2190 | 2190 | # pass |
|
2191 | 2191 | # [''.join(k) for k in s.iter(prefix)] |
|
2192 | 2192 | |
|
2193 | 2193 | # But need to be timed and adds an extra dependency. |
|
2194 | 2194 | |
|
2195 | 2195 | slashpos = text.rfind('\\') |
|
2196 | 2196 | # if text starts with slash |
|
2197 | 2197 | if slashpos > -1: |
|
2198 | 2198 | # PERF: It's important that we don't access self._unicode_names |
|
2199 | 2199 | # until we're inside this if-block. _unicode_names is lazily |
|
2200 | 2200 | # initialized, and it takes a user-noticeable amount of time to |
|
2201 | 2201 | # initialize it, so we don't want to initialize it unless we're |
|
2202 | 2202 | # actually going to use it. |
|
2203 | 2203 | s = text[slashpos+1:] |
|
2204 | 2204 | candidates = [x for x in self.unicode_names if x.startswith(s)] |
|
2205 | 2205 | if candidates: |
|
2206 | 2206 | return s, candidates |
|
2207 | 2207 | else: |
|
2208 | 2208 | return '', () |
|
2209 | 2209 | |
|
2210 | 2210 | # if text does not start with slash |
|
2211 | 2211 | else: |
|
2212 | 2212 | return '', () |
|
2213 | 2213 | |
|
2214 | 2214 | @property |
|
2215 | 2215 | def unicode_names(self) -> List[str]: |
|
2216 | 2216 | """List of names of unicode code points that can be completed. |
|
2217 | 2217 | |
|
2218 | 2218 | The list is lazily initialized on first access. |
|
2219 | 2219 | """ |
|
2220 | 2220 | if self._unicode_names is None: |
|
2221 | 2221 | names = [] |
|
2222 | 2222 | for c in range(0,0x10FFFF + 1): |
|
2223 | 2223 | try: |
|
2224 | 2224 | names.append(unicodedata.name(chr(c))) |
|
2225 | 2225 | except ValueError: |
|
2226 | 2226 | pass |
|
2227 | 2227 | self._unicode_names = _unicode_name_compute(_UNICODE_RANGES) |
|
2228 | 2228 | |
|
2229 | 2229 | return self._unicode_names |
|
2230 | 2230 | |
|
2231 | 2231 | def _unicode_name_compute(ranges:List[Tuple[int,int]]) -> List[str]: |
|
2232 | 2232 | names = [] |
|
2233 | 2233 | for start,stop in ranges: |
|
2234 | 2234 | for c in range(start, stop) : |
|
2235 | 2235 | try: |
|
2236 | 2236 | names.append(unicodedata.name(chr(c))) |
|
2237 | 2237 | except ValueError: |
|
2238 | 2238 | pass |
|
2239 | 2239 | return names |
@@ -1,354 +1,370 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """Implementations for various useful completers. |
|
3 | 3 | |
|
4 | 4 | These are all loaded by default by IPython. |
|
5 | 5 | """ |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | # Copyright (C) 2010-2011 The IPython Development Team. |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. |
|
10 | 10 | # |
|
11 | 11 | # The full license is in the file COPYING.txt, distributed with this software. |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | # Imports |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | # Stdlib imports |
|
19 | 19 | import glob |
|
20 | 20 | import inspect |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import sys |
|
24 | 24 | from importlib import import_module |
|
25 | 25 | from importlib.machinery import all_suffixes |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | # Third-party imports |
|
29 | 29 | from time import time |
|
30 | 30 | from zipimport import zipimporter |
|
31 | 31 | |
|
32 | 32 | # Our own imports |
|
33 | 33 | from .completer import expand_user, compress_user |
|
34 | 34 | from .error import TryNext |
|
35 | 35 | from ..utils._process_common import arg_split |
|
36 | 36 | |
|
37 | 37 | # FIXME: this should be pulled in with the right call via the component system |
|
38 | 38 | from IPython import get_ipython |
|
39 | 39 | |
|
40 | 40 | from typing import List |
|
41 | 41 | |
|
42 | 42 | #----------------------------------------------------------------------------- |
|
43 | 43 | # Globals and constants |
|
44 | 44 | #----------------------------------------------------------------------------- |
|
45 | 45 | _suffixes = all_suffixes() |
|
46 | 46 | |
|
47 | 47 | # Time in seconds after which the rootmodules will be stored permanently in the |
|
48 | 48 | # ipython ip.db database (kept in the user's .ipython dir). |
|
49 | 49 | TIMEOUT_STORAGE = 2 |
|
50 | 50 | |
|
51 | 51 | # Time in seconds after which we give up |
|
52 | 52 | TIMEOUT_GIVEUP = 20 |
|
53 | 53 | |
|
54 | 54 | # Regular expression for the python import statement |
|
55 | 55 | import_re = re.compile(r'(?P<name>[^\W\d]\w*?)' |
|
56 | 56 | r'(?P<package>[/\\]__init__)?' |
|
57 | 57 | r'(?P<suffix>%s)$' % |
|
58 | 58 | r'|'.join(re.escape(s) for s in _suffixes)) |
|
59 | 59 | |
|
60 | 60 | # RE for the ipython %run command (python + ipython scripts) |
|
61 | 61 | magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') |
|
62 | 62 | |
|
63 | 63 | #----------------------------------------------------------------------------- |
|
64 | 64 | # Local utilities |
|
65 | 65 | #----------------------------------------------------------------------------- |
|
66 | 66 | |
|
67 | 67 | def module_list(path): |
|
68 | 68 | """ |
|
69 | 69 | Return the list containing the names of the modules available in the given |
|
70 | 70 | folder. |
|
71 | 71 | """ |
|
72 | 72 | # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' |
|
73 | 73 | if path == '': |
|
74 | 74 | path = '.' |
|
75 | 75 | |
|
76 | 76 | # A few local constants to be used in loops below |
|
77 | 77 | pjoin = os.path.join |
|
78 | 78 | |
|
79 | 79 | if os.path.isdir(path): |
|
80 | 80 | # Build a list of all files in the directory and all files |
|
81 | 81 | # in its subdirectories. For performance reasons, do not |
|
82 | 82 | # recurse more than one level into subdirectories. |
|
83 | 83 | files = [] |
|
84 | 84 | for root, dirs, nondirs in os.walk(path, followlinks=True): |
|
85 | 85 | subdir = root[len(path)+1:] |
|
86 | 86 | if subdir: |
|
87 | 87 | files.extend(pjoin(subdir, f) for f in nondirs) |
|
88 | 88 | dirs[:] = [] # Do not recurse into additional subdirectories. |
|
89 | 89 | else: |
|
90 | 90 | files.extend(nondirs) |
|
91 | 91 | |
|
92 | 92 | else: |
|
93 | 93 | try: |
|
94 | 94 | files = list(zipimporter(path)._files.keys()) |
|
95 | 95 | except: |
|
96 | 96 | files = [] |
|
97 | 97 | |
|
98 | 98 | # Build a list of modules which match the import_re regex. |
|
99 | 99 | modules = [] |
|
100 | 100 | for f in files: |
|
101 | 101 | m = import_re.match(f) |
|
102 | 102 | if m: |
|
103 | 103 | modules.append(m.group('name')) |
|
104 | 104 | return list(set(modules)) |
|
105 | 105 | |
|
106 | 106 | |
|
107 | 107 | def get_root_modules(): |
|
108 | 108 | """ |
|
109 | 109 | Returns a list containing the names of all the modules available in the |
|
110 | 110 | folders of the pythonpath. |
|
111 | 111 | |
|
112 | 112 | ip.db['rootmodules_cache'] maps sys.path entries to list of modules. |
|
113 | 113 | """ |
|
114 | 114 | ip = get_ipython() |
|
115 | 115 | if ip is None: |
|
116 | 116 | # No global shell instance to store cached list of modules. |
|
117 | 117 | # Don't try to scan for modules every time. |
|
118 | 118 | return list(sys.builtin_module_names) |
|
119 | 119 | |
|
120 | 120 | rootmodules_cache = ip.db.get('rootmodules_cache', {}) |
|
121 | 121 | rootmodules = list(sys.builtin_module_names) |
|
122 | 122 | start_time = time() |
|
123 | 123 | store = False |
|
124 | 124 | for path in sys.path: |
|
125 | 125 | try: |
|
126 | 126 | modules = rootmodules_cache[path] |
|
127 | 127 | except KeyError: |
|
128 | 128 | modules = module_list(path) |
|
129 | 129 | try: |
|
130 | 130 | modules.remove('__init__') |
|
131 | 131 | except ValueError: |
|
132 | 132 | pass |
|
133 | 133 | if path not in ('', '.'): # cwd modules should not be cached |
|
134 | 134 | rootmodules_cache[path] = modules |
|
135 | 135 | if time() - start_time > TIMEOUT_STORAGE and not store: |
|
136 | 136 | store = True |
|
137 | 137 | print("\nCaching the list of root modules, please wait!") |
|
138 | 138 | print("(This will only be done once - type '%rehashx' to " |
|
139 | 139 | "reset cache!)\n") |
|
140 | 140 | sys.stdout.flush() |
|
141 | 141 | if time() - start_time > TIMEOUT_GIVEUP: |
|
142 | 142 | print("This is taking too long, we give up.\n") |
|
143 | 143 | return [] |
|
144 | 144 | rootmodules.extend(modules) |
|
145 | 145 | if store: |
|
146 | 146 | ip.db['rootmodules_cache'] = rootmodules_cache |
|
147 | 147 | rootmodules = list(set(rootmodules)) |
|
148 | 148 | return rootmodules |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | def is_importable(module, attr, only_modules): |
|
152 | 152 | if only_modules: |
|
153 | 153 | return inspect.ismodule(getattr(module, attr)) |
|
154 | 154 | else: |
|
155 | 155 | return not(attr[:2] == '__' and attr[-2:] == '__') |
|
156 | 156 | |
|
157 | def is_possible_submodule(module, attr): | |
|
158 | try: | |
|
159 | obj = getattr(module, attr) | |
|
160 | except AttributeError: | |
|
161 | # Is possilby an unimported submodule | |
|
162 | return True | |
|
163 | except TypeError: | |
|
164 | # https://github.com/ipython/ipython/issues/9678 | |
|
165 | return False | |
|
166 | return inspect.ismodule(obj) | |
|
167 | ||
|
157 | 168 | |
|
158 | 169 | def try_import(mod: str, only_modules=False) -> List[str]: |
|
159 | 170 | """ |
|
160 | 171 | Try to import given module and return list of potential completions. |
|
161 | 172 | """ |
|
162 | 173 | mod = mod.rstrip('.') |
|
163 | 174 | try: |
|
164 | 175 | m = import_module(mod) |
|
165 | 176 | except: |
|
166 | 177 | return [] |
|
167 | 178 | |
|
168 | 179 | m_is_init = '__init__' in (getattr(m, '__file__', '') or '') |
|
169 | 180 | |
|
170 | 181 | completions = [] |
|
171 | 182 | if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: |
|
172 | 183 | completions.extend( [attr for attr in dir(m) if |
|
173 | 184 | is_importable(m, attr, only_modules)]) |
|
174 | 185 | |
|
175 |
|
|
|
186 | m_all = getattr(m, "__all__", []) | |
|
187 | if only_modules: | |
|
188 | completions.extend(attr for attr in m_all if is_possible_submodule(m, attr)) | |
|
189 | else: | |
|
190 | completions.extend(m_all) | |
|
191 | ||
|
176 | 192 | if m_is_init: |
|
177 | 193 | completions.extend(module_list(os.path.dirname(m.__file__))) |
|
178 | 194 | completions_set = {c for c in completions if isinstance(c, str)} |
|
179 | 195 | completions_set.discard('__init__') |
|
180 | 196 | return list(completions_set) |
|
181 | 197 | |
|
182 | 198 | |
|
183 | 199 | #----------------------------------------------------------------------------- |
|
184 | 200 | # Completion-related functions. |
|
185 | 201 | #----------------------------------------------------------------------------- |
|
186 | 202 | |
|
187 | 203 | def quick_completer(cmd, completions): |
|
188 | 204 | r""" Easily create a trivial completer for a command. |
|
189 | 205 | |
|
190 | 206 | Takes either a list of completions, or all completions in string (that will |
|
191 | 207 | be split on whitespace). |
|
192 | 208 | |
|
193 | 209 | Example:: |
|
194 | 210 | |
|
195 | 211 | [d:\ipython]|1> import ipy_completers |
|
196 | 212 | [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) |
|
197 | 213 | [d:\ipython]|3> foo b<TAB> |
|
198 | 214 | bar baz |
|
199 | 215 | [d:\ipython]|3> foo ba |
|
200 | 216 | """ |
|
201 | 217 | |
|
202 | 218 | if isinstance(completions, str): |
|
203 | 219 | completions = completions.split() |
|
204 | 220 | |
|
205 | 221 | def do_complete(self, event): |
|
206 | 222 | return completions |
|
207 | 223 | |
|
208 | 224 | get_ipython().set_hook('complete_command',do_complete, str_key = cmd) |
|
209 | 225 | |
|
210 | 226 | def module_completion(line): |
|
211 | 227 | """ |
|
212 | 228 | Returns a list containing the completion possibilities for an import line. |
|
213 | 229 | |
|
214 | 230 | The line looks like this : |
|
215 | 231 | 'import xml.d' |
|
216 | 232 | 'from xml.dom import' |
|
217 | 233 | """ |
|
218 | 234 | |
|
219 | 235 | words = line.split(' ') |
|
220 | 236 | nwords = len(words) |
|
221 | 237 | |
|
222 | 238 | # from whatever <tab> -> 'import ' |
|
223 | 239 | if nwords == 3 and words[0] == 'from': |
|
224 | 240 | return ['import '] |
|
225 | 241 | |
|
226 | 242 | # 'from xy<tab>' or 'import xy<tab>' |
|
227 | 243 | if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : |
|
228 | 244 | if nwords == 1: |
|
229 | 245 | return get_root_modules() |
|
230 | 246 | mod = words[1].split('.') |
|
231 | 247 | if len(mod) < 2: |
|
232 | 248 | return get_root_modules() |
|
233 | 249 | completion_list = try_import('.'.join(mod[:-1]), True) |
|
234 | 250 | return ['.'.join(mod[:-1] + [el]) for el in completion_list] |
|
235 | 251 | |
|
236 | 252 | # 'from xyz import abc<tab>' |
|
237 | 253 | if nwords >= 3 and words[0] == 'from': |
|
238 | 254 | mod = words[1] |
|
239 | 255 | return try_import(mod) |
|
240 | 256 | |
|
241 | 257 | #----------------------------------------------------------------------------- |
|
242 | 258 | # Completers |
|
243 | 259 | #----------------------------------------------------------------------------- |
|
244 | 260 | # These all have the func(self, event) signature to be used as custom |
|
245 | 261 | # completers |
|
246 | 262 | |
|
247 | 263 | def module_completer(self,event): |
|
248 | 264 | """Give completions after user has typed 'import ...' or 'from ...'""" |
|
249 | 265 | |
|
250 | 266 | # This works in all versions of python. While 2.5 has |
|
251 | 267 | # pkgutil.walk_packages(), that particular routine is fairly dangerous, |
|
252 | 268 | # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full |
|
253 | 269 | # of possibly problematic side effects. |
|
254 | 270 | # This searches the folders in sys.path for available modules. |
|
255 | 271 | |
|
256 | 272 | return module_completion(event.line) |
|
257 | 273 | |
|
258 | 274 | # FIXME: there's a lot of logic common to the run, cd and builtin file |
|
259 | 275 | # completers, that is currently reimplemented in each. |
|
260 | 276 | |
|
261 | 277 | def magic_run_completer(self, event): |
|
262 | 278 | """Complete files that end in .py or .ipy or .ipynb for the %run command. |
|
263 | 279 | """ |
|
264 | 280 | comps = arg_split(event.line, strict=False) |
|
265 | 281 | # relpath should be the current token that we need to complete. |
|
266 | 282 | if (len(comps) > 1) and (not event.line.endswith(' ')): |
|
267 | 283 | relpath = comps[-1].strip("'\"") |
|
268 | 284 | else: |
|
269 | 285 | relpath = '' |
|
270 | 286 | |
|
271 | 287 | #print("\nev=", event) # dbg |
|
272 | 288 | #print("rp=", relpath) # dbg |
|
273 | 289 | #print('comps=', comps) # dbg |
|
274 | 290 | |
|
275 | 291 | lglob = glob.glob |
|
276 | 292 | isdir = os.path.isdir |
|
277 | 293 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
278 | 294 | |
|
279 | 295 | # Find if the user has already typed the first filename, after which we |
|
280 | 296 | # should complete on all files, since after the first one other files may |
|
281 | 297 | # be arguments to the input script. |
|
282 | 298 | |
|
283 | 299 | if any(magic_run_re.match(c) for c in comps): |
|
284 | 300 | matches = [f.replace('\\','/') + ('/' if isdir(f) else '') |
|
285 | 301 | for f in lglob(relpath+'*')] |
|
286 | 302 | else: |
|
287 | 303 | dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] |
|
288 | 304 | pys = [f.replace('\\','/') |
|
289 | 305 | for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + |
|
290 | 306 | lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] |
|
291 | 307 | |
|
292 | 308 | matches = dirs + pys |
|
293 | 309 | |
|
294 | 310 | #print('run comp:', dirs+pys) # dbg |
|
295 | 311 | return [compress_user(p, tilde_expand, tilde_val) for p in matches] |
|
296 | 312 | |
|
297 | 313 | |
|
298 | 314 | def cd_completer(self, event): |
|
299 | 315 | """Completer function for cd, which only returns directories.""" |
|
300 | 316 | ip = get_ipython() |
|
301 | 317 | relpath = event.symbol |
|
302 | 318 | |
|
303 | 319 | #print(event) # dbg |
|
304 | 320 | if event.line.endswith('-b') or ' -b ' in event.line: |
|
305 | 321 | # return only bookmark completions |
|
306 | 322 | bkms = self.db.get('bookmarks', None) |
|
307 | 323 | if bkms: |
|
308 | 324 | return bkms.keys() |
|
309 | 325 | else: |
|
310 | 326 | return [] |
|
311 | 327 | |
|
312 | 328 | if event.symbol == '-': |
|
313 | 329 | width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) |
|
314 | 330 | # jump in directory history by number |
|
315 | 331 | fmt = '-%0' + width_dh +'d [%s]' |
|
316 | 332 | ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] |
|
317 | 333 | if len(ents) > 1: |
|
318 | 334 | return ents |
|
319 | 335 | return [] |
|
320 | 336 | |
|
321 | 337 | if event.symbol.startswith('--'): |
|
322 | 338 | return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] |
|
323 | 339 | |
|
324 | 340 | # Expand ~ in path and normalize directory separators. |
|
325 | 341 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
326 | 342 | relpath = relpath.replace('\\','/') |
|
327 | 343 | |
|
328 | 344 | found = [] |
|
329 | 345 | for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') |
|
330 | 346 | if os.path.isdir(f)]: |
|
331 | 347 | if ' ' in d: |
|
332 | 348 | # we don't want to deal with any of that, complex code |
|
333 | 349 | # for this is elsewhere |
|
334 | 350 | raise TryNext |
|
335 | 351 | |
|
336 | 352 | found.append(d) |
|
337 | 353 | |
|
338 | 354 | if not found: |
|
339 | 355 | if os.path.isdir(relpath): |
|
340 | 356 | return [compress_user(relpath, tilde_expand, tilde_val)] |
|
341 | 357 | |
|
342 | 358 | # if no completions so far, try bookmarks |
|
343 | 359 | bks = self.db.get('bookmarks',{}) |
|
344 | 360 | bkmatches = [s for s in bks if s.startswith(event.symbol)] |
|
345 | 361 | if bkmatches: |
|
346 | 362 | return bkmatches |
|
347 | 363 | |
|
348 | 364 | raise TryNext |
|
349 | 365 | |
|
350 | 366 | return [compress_user(p, tilde_expand, tilde_val) for p in found] |
|
351 | 367 | |
|
352 | 368 | def reset_completer(self, event): |
|
353 | 369 | "A completer for %reset magic" |
|
354 | 370 | return '-f -s in out array dhist'.split() |
@@ -1,229 +1,223 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """sys.excepthook for IPython itself, leaves a detailed report on disk. |
|
3 | 3 | |
|
4 | 4 | Authors: |
|
5 | 5 | |
|
6 | 6 | * Fernando Perez |
|
7 | 7 | * Brian E. Granger |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | #----------------------------------------------------------------------------- |
|
11 | 11 | # Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> |
|
12 | 12 | # Copyright (C) 2008-2011 The IPython Development Team |
|
13 | 13 | # |
|
14 | 14 | # Distributed under the terms of the BSD License. The full license is in |
|
15 | 15 | # the file COPYING, distributed as part of this software. |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | #----------------------------------------------------------------------------- |
|
19 | 19 | # Imports |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import sys |
|
24 | 24 | import traceback |
|
25 | 25 | from pprint import pformat |
|
26 | 26 | from pathlib import Path |
|
27 | 27 | |
|
28 | 28 | from IPython.core import ultratb |
|
29 | 29 | from IPython.core.release import author_email |
|
30 | 30 | from IPython.utils.sysinfo import sys_info |
|
31 | 31 | from IPython.utils.py3compat import input |
|
32 | 32 | |
|
33 | 33 | from IPython.core.release import __version__ as version |
|
34 | 34 | |
|
35 | 35 | #----------------------------------------------------------------------------- |
|
36 | 36 | # Code |
|
37 | 37 | #----------------------------------------------------------------------------- |
|
38 | 38 | |
|
39 | 39 | # Template for the user message. |
|
40 | 40 | _default_message_template = """\ |
|
41 | 41 | Oops, {app_name} crashed. We do our best to make it stable, but... |
|
42 | 42 | |
|
43 | 43 | A crash report was automatically generated with the following information: |
|
44 | 44 | - A verbatim copy of the crash traceback. |
|
45 | 45 | - A copy of your input history during this session. |
|
46 | 46 | - Data on your current {app_name} configuration. |
|
47 | 47 | |
|
48 | 48 | It was left in the file named: |
|
49 | 49 | \t'{crash_report_fname}' |
|
50 | 50 | If you can email this file to the developers, the information in it will help |
|
51 | 51 | them in understanding and correcting the problem. |
|
52 | 52 | |
|
53 | 53 | You can mail it to: {contact_name} at {contact_email} |
|
54 | 54 | with the subject '{app_name} Crash Report'. |
|
55 | 55 | |
|
56 | 56 | If you want to do it now, the following command will work (under Unix): |
|
57 | 57 | mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname} |
|
58 | 58 | |
|
59 | 59 | In your email, please also include information about: |
|
60 | 60 | - The operating system under which the crash happened: Linux, macOS, Windows, |
|
61 | 61 | other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2, |
|
62 | 62 | Windows 10 Pro), and whether it is 32-bit or 64-bit; |
|
63 | 63 | - How {app_name} was installed: using pip or conda, from GitHub, as part of |
|
64 | 64 | a Docker container, or other, providing more detail if possible; |
|
65 | 65 | - How to reproduce the crash: what exact sequence of instructions can one |
|
66 | 66 | input to get the same crash? Ideally, find a minimal yet complete sequence |
|
67 | 67 | of instructions that yields the crash. |
|
68 | 68 | |
|
69 | 69 | To ensure accurate tracking of this issue, please file a report about it at: |
|
70 | 70 | {bug_tracker} |
|
71 | 71 | """ |
|
72 | 72 | |
|
73 | 73 | _lite_message_template = """ |
|
74 | 74 | If you suspect this is an IPython {version} bug, please report it at: |
|
75 | 75 | https://github.com/ipython/ipython/issues |
|
76 | 76 | or send an email to the mailing list at {email} |
|
77 | 77 | |
|
78 | 78 | You can print a more detailed traceback right now with "%tb", or use "%debug" |
|
79 | 79 | to interactively debug it. |
|
80 | 80 | |
|
81 | 81 | Extra-detailed tracebacks for bug-reporting purposes can be enabled via: |
|
82 | 82 | {config}Application.verbose_crash=True |
|
83 | 83 | """ |
|
84 | 84 | |
|
85 | 85 | |
|
86 | 86 | class CrashHandler(object): |
|
87 | 87 | """Customizable crash handlers for IPython applications. |
|
88 | 88 | |
|
89 | 89 | Instances of this class provide a :meth:`__call__` method which can be |
|
90 | 90 | used as a ``sys.excepthook``. The :meth:`__call__` signature is:: |
|
91 | 91 | |
|
92 | 92 | def __call__(self, etype, evalue, etb) |
|
93 | 93 | """ |
|
94 | 94 | |
|
95 | 95 | message_template = _default_message_template |
|
96 | 96 | section_sep = '\n\n'+'*'*75+'\n\n' |
|
97 | 97 | |
|
98 | 98 | def __init__(self, app, contact_name=None, contact_email=None, |
|
99 | 99 | bug_tracker=None, show_crash_traceback=True, call_pdb=False): |
|
100 | 100 | """Create a new crash handler |
|
101 | 101 | |
|
102 | 102 | Parameters |
|
103 | 103 | ---------- |
|
104 | 104 |
app : |
|
105 | 105 | A running :class:`Application` instance, which will be queried at |
|
106 | 106 | crash time for internal information. |
|
107 | ||
|
108 | 107 | contact_name : str |
|
109 | 108 | A string with the name of the person to contact. |
|
110 | ||
|
111 | 109 | contact_email : str |
|
112 | 110 | A string with the email address of the contact. |
|
113 | ||
|
114 | 111 | bug_tracker : str |
|
115 | 112 | A string with the URL for your project's bug tracker. |
|
116 | ||
|
117 | 113 | show_crash_traceback : bool |
|
118 | 114 | If false, don't print the crash traceback on stderr, only generate |
|
119 | 115 | the on-disk report |
|
120 | ||
|
121 | Non-argument instance attributes: | |
|
122 | ||
|
116 | Non-argument instance attributes | |
|
123 | 117 | These instances contain some non-argument attributes which allow for |
|
124 | 118 | further customization of the crash handler's behavior. Please see the |
|
125 | 119 | source for further details. |
|
126 | 120 | """ |
|
127 | 121 | self.crash_report_fname = "Crash_report_%s.txt" % app.name |
|
128 | 122 | self.app = app |
|
129 | 123 | self.call_pdb = call_pdb |
|
130 | 124 | #self.call_pdb = True # dbg |
|
131 | 125 | self.show_crash_traceback = show_crash_traceback |
|
132 | 126 | self.info = dict(app_name = app.name, |
|
133 | 127 | contact_name = contact_name, |
|
134 | 128 | contact_email = contact_email, |
|
135 | 129 | bug_tracker = bug_tracker, |
|
136 | 130 | crash_report_fname = self.crash_report_fname) |
|
137 | 131 | |
|
138 | 132 | |
|
139 | 133 | def __call__(self, etype, evalue, etb): |
|
140 | 134 | """Handle an exception; call signature is compatible with sys.excepthook""" |
|
141 | 135 | |
|
142 | 136 | # do not allow the crash handler to be called twice without reinstalling it |
|
143 | 137 | # this prevents unlikely errors in the crash handling from entering an |
|
144 | 138 | # infinite loop. |
|
145 | 139 | sys.excepthook = sys.__excepthook__ |
|
146 | 140 | |
|
147 | 141 | # Report tracebacks shouldn't use color in general (safer for users) |
|
148 | 142 | color_scheme = 'NoColor' |
|
149 | 143 | |
|
150 | 144 | # Use this ONLY for developer debugging (keep commented out for release) |
|
151 | 145 | #color_scheme = 'Linux' # dbg |
|
152 | 146 | try: |
|
153 | 147 | rptdir = self.app.ipython_dir |
|
154 | 148 | except: |
|
155 | 149 | rptdir = Path.cwd() |
|
156 | 150 | if rptdir is None or not Path.is_dir(rptdir): |
|
157 | 151 | rptdir = Path.cwd() |
|
158 | 152 | report_name = rptdir / self.crash_report_fname |
|
159 | 153 | # write the report filename into the instance dict so it can get |
|
160 | 154 | # properly expanded out in the user message template |
|
161 | 155 | self.crash_report_fname = report_name |
|
162 | 156 | self.info['crash_report_fname'] = report_name |
|
163 | 157 | TBhandler = ultratb.VerboseTB( |
|
164 | 158 | color_scheme=color_scheme, |
|
165 | 159 | long_header=1, |
|
166 | 160 | call_pdb=self.call_pdb, |
|
167 | 161 | ) |
|
168 | 162 | if self.call_pdb: |
|
169 | 163 | TBhandler(etype,evalue,etb) |
|
170 | 164 | return |
|
171 | 165 | else: |
|
172 | 166 | traceback = TBhandler.text(etype,evalue,etb,context=31) |
|
173 | 167 | |
|
174 | 168 | # print traceback to screen |
|
175 | 169 | if self.show_crash_traceback: |
|
176 | 170 | print(traceback, file=sys.stderr) |
|
177 | 171 | |
|
178 | 172 | # and generate a complete report on disk |
|
179 | 173 | try: |
|
180 | 174 | report = open(report_name,'w') |
|
181 | 175 | except: |
|
182 | 176 | print('Could not create crash report on disk.', file=sys.stderr) |
|
183 | 177 | return |
|
184 | 178 | |
|
185 | 179 | with report: |
|
186 | 180 | # Inform user on stderr of what happened |
|
187 | 181 | print('\n'+'*'*70+'\n', file=sys.stderr) |
|
188 | 182 | print(self.message_template.format(**self.info), file=sys.stderr) |
|
189 | 183 | |
|
190 | 184 | # Construct report on disk |
|
191 | 185 | report.write(self.make_report(traceback)) |
|
192 | 186 | |
|
193 | 187 | input("Hit <Enter> to quit (your terminal may close):") |
|
194 | 188 | |
|
195 | 189 | def make_report(self,traceback): |
|
196 | 190 | """Return a string containing a crash report.""" |
|
197 | 191 | |
|
198 | 192 | sec_sep = self.section_sep |
|
199 | 193 | |
|
200 | 194 | report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n'] |
|
201 | 195 | rpt_add = report.append |
|
202 | 196 | rpt_add(sys_info()) |
|
203 | 197 | |
|
204 | 198 | try: |
|
205 | 199 | config = pformat(self.app.config) |
|
206 | 200 | rpt_add(sec_sep) |
|
207 | 201 | rpt_add('Application name: %s\n\n' % self.app_name) |
|
208 | 202 | rpt_add('Current user configuration structure:\n\n') |
|
209 | 203 | rpt_add(config) |
|
210 | 204 | except: |
|
211 | 205 | pass |
|
212 | 206 | rpt_add(sec_sep+'Crash traceback:\n\n' + traceback) |
|
213 | 207 | |
|
214 | 208 | return ''.join(report) |
|
215 | 209 | |
|
216 | 210 | |
|
217 | 211 | def crash_handler_lite(etype, evalue, tb): |
|
218 | 212 | """a light excepthook, adding a small message to the usual traceback""" |
|
219 | 213 | traceback.print_exception(etype, evalue, tb) |
|
220 | 214 | |
|
221 | 215 | from IPython.core.interactiveshell import InteractiveShell |
|
222 | 216 | if InteractiveShell.initialized(): |
|
223 | 217 | # we are in a Shell environment, give %magic example |
|
224 | 218 | config = "%config " |
|
225 | 219 | else: |
|
226 | 220 | # we are not in a shell, show generic config |
|
227 | 221 | config = "c." |
|
228 | 222 | print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr) |
|
229 | 223 |
@@ -1,857 +1,972 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | Pdb debugger class. |
|
4 | 4 | |
|
5 | 5 | Modified from the standard pdb.Pdb class to avoid including readline, so that |
|
6 | 6 | the command line completion of other programs which include this isn't |
|
7 | 7 | damaged. |
|
8 | 8 | |
|
9 | 9 | In the future, this class will be expanded with improvements over the standard |
|
10 | 10 | pdb. |
|
11 | 11 | |
|
12 | 12 | The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor |
|
13 | 13 | changes. Licensing should therefore be under the standard Python terms. For |
|
14 | 14 | details on the PSF (Python Software Foundation) standard license, see: |
|
15 | 15 | |
|
16 | 16 | https://docs.python.org/2/license.html |
|
17 | 17 | """ |
|
18 | 18 | |
|
19 | 19 | #***************************************************************************** |
|
20 | 20 | # |
|
21 | 21 | # This file is licensed under the PSF license. |
|
22 | 22 | # |
|
23 | 23 | # Copyright (C) 2001 Python Software Foundation, www.python.org |
|
24 | 24 | # Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu> |
|
25 | 25 | # |
|
26 | 26 | # |
|
27 | 27 | #***************************************************************************** |
|
28 | 28 | |
|
29 | 29 | import bdb |
|
30 | 30 | import functools |
|
31 | 31 | import inspect |
|
32 | 32 | import linecache |
|
33 | 33 | import sys |
|
34 | 34 | import warnings |
|
35 | 35 | import re |
|
36 | import os | |
|
36 | 37 | |
|
37 | 38 | from IPython import get_ipython |
|
38 | 39 | from IPython.utils import PyColorize |
|
39 | 40 | from IPython.utils import coloransi, py3compat |
|
40 | 41 | from IPython.core.excolors import exception_colors |
|
41 | 42 | from IPython.testing.skipdoctest import skip_doctest |
|
42 | 43 | |
|
43 | 44 | |
|
44 | 45 | prompt = 'ipdb> ' |
|
45 | 46 | |
|
46 | 47 | # We have to check this directly from sys.argv, config struct not yet available |
|
47 | 48 | from pdb import Pdb as OldPdb |
|
48 | 49 | |
|
49 | 50 | # Allow the set_trace code to operate outside of an ipython instance, even if |
|
50 | 51 | # it does so with some limitations. The rest of this support is implemented in |
|
51 | 52 | # the Tracer constructor. |
|
52 | 53 | |
|
53 | 54 | |
|
54 | 55 | def make_arrow(pad): |
|
55 | 56 | """generate the leading arrow in front of traceback or debugger""" |
|
56 | 57 | if pad >= 2: |
|
57 | 58 | return '-'*(pad-2) + '> ' |
|
58 | 59 | elif pad == 1: |
|
59 | 60 | return '>' |
|
60 | 61 | return '' |
|
61 | 62 | |
|
62 | 63 | |
|
63 | 64 | def BdbQuit_excepthook(et, ev, tb, excepthook=None): |
|
64 | 65 | """Exception hook which handles `BdbQuit` exceptions. |
|
65 | 66 | |
|
66 | 67 | All other exceptions are processed using the `excepthook` |
|
67 | 68 | parameter. |
|
68 | 69 | """ |
|
69 | 70 | warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1", |
|
70 | 71 | DeprecationWarning, stacklevel=2) |
|
71 | 72 | if et == bdb.BdbQuit: |
|
72 | 73 | print('Exiting Debugger.') |
|
73 | 74 | elif excepthook is not None: |
|
74 | 75 | excepthook(et, ev, tb) |
|
75 | 76 | else: |
|
76 | 77 | # Backwards compatibility. Raise deprecation warning? |
|
77 | 78 | BdbQuit_excepthook.excepthook_ori(et, ev, tb) |
|
78 | 79 | |
|
79 | 80 | |
|
80 | 81 | def BdbQuit_IPython_excepthook(self, et, ev, tb, tb_offset=None): |
|
81 | 82 | warnings.warn( |
|
82 | 83 | "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", |
|
83 | 84 | DeprecationWarning, stacklevel=2) |
|
84 | 85 | print('Exiting Debugger.') |
|
85 | 86 | |
|
86 | 87 | |
|
87 | 88 | class Tracer(object): |
|
88 | 89 | """ |
|
89 | 90 | DEPRECATED |
|
90 | 91 | |
|
91 | 92 | Class for local debugging, similar to pdb.set_trace. |
|
92 | 93 | |
|
93 | 94 | Instances of this class, when called, behave like pdb.set_trace, but |
|
94 | 95 | providing IPython's enhanced capabilities. |
|
95 | 96 | |
|
96 | 97 | This is implemented as a class which must be initialized in your own code |
|
97 | 98 | and not as a standalone function because we need to detect at runtime |
|
98 | 99 | whether IPython is already active or not. That detection is done in the |
|
99 | 100 | constructor, ensuring that this code plays nicely with a running IPython, |
|
100 | 101 | while functioning acceptably (though with limitations) if outside of it. |
|
101 | 102 | """ |
|
102 | 103 | |
|
103 | 104 | @skip_doctest |
|
104 | 105 | def __init__(self, colors=None): |
|
105 | 106 | """ |
|
106 | 107 | DEPRECATED |
|
107 | 108 | |
|
108 | 109 | Create a local debugger instance. |
|
109 | 110 | |
|
110 | 111 | Parameters |
|
111 | 112 | ---------- |
|
112 | ||
|
113 | 113 | colors : str, optional |
|
114 | 114 | The name of the color scheme to use, it must be one of IPython's |
|
115 | 115 | valid color schemes. If not given, the function will default to |
|
116 | 116 | the current IPython scheme when running inside IPython, and to |
|
117 | 117 | 'NoColor' otherwise. |
|
118 | 118 | |
|
119 | 119 | Examples |
|
120 | 120 | -------- |
|
121 | 121 | :: |
|
122 | 122 | |
|
123 | 123 | from IPython.core.debugger import Tracer; debug_here = Tracer() |
|
124 | 124 | |
|
125 | 125 | Later in your code:: |
|
126 | 126 | |
|
127 | 127 | debug_here() # -> will open up the debugger at that point. |
|
128 | 128 | |
|
129 | 129 | Once the debugger activates, you can use all of its regular commands to |
|
130 | 130 | step through code, set breakpoints, etc. See the pdb documentation |
|
131 | 131 | from the Python standard library for usage details. |
|
132 | 132 | """ |
|
133 | 133 | warnings.warn("`Tracer` is deprecated since version 5.1, directly use " |
|
134 | 134 | "`IPython.core.debugger.Pdb.set_trace()`", |
|
135 | 135 | DeprecationWarning, stacklevel=2) |
|
136 | 136 | |
|
137 | 137 | ip = get_ipython() |
|
138 | 138 | if ip is None: |
|
139 | 139 | # Outside of ipython, we set our own exception hook manually |
|
140 | 140 | sys.excepthook = functools.partial(BdbQuit_excepthook, |
|
141 | 141 | excepthook=sys.excepthook) |
|
142 | 142 | def_colors = 'NoColor' |
|
143 | 143 | else: |
|
144 | 144 | # In ipython, we use its custom exception handler mechanism |
|
145 | 145 | def_colors = ip.colors |
|
146 | 146 | ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) |
|
147 | 147 | |
|
148 | 148 | if colors is None: |
|
149 | 149 | colors = def_colors |
|
150 | 150 | |
|
151 | 151 | # The stdlib debugger internally uses a modified repr from the `repr` |
|
152 | 152 | # module, that limits the length of printed strings to a hardcoded |
|
153 | 153 | # limit of 30 characters. That much trimming is too aggressive, let's |
|
154 | 154 | # at least raise that limit to 80 chars, which should be enough for |
|
155 | 155 | # most interactive uses. |
|
156 | 156 | try: |
|
157 | 157 | from reprlib import aRepr |
|
158 | 158 | aRepr.maxstring = 80 |
|
159 | 159 | except: |
|
160 | 160 | # This is only a user-facing convenience, so any error we encounter |
|
161 | 161 | # here can be warned about but can be otherwise ignored. These |
|
162 | 162 | # printouts will tell us about problems if this API changes |
|
163 | 163 | import traceback |
|
164 | 164 | traceback.print_exc() |
|
165 | 165 | |
|
166 | 166 | self.debugger = Pdb(colors) |
|
167 | 167 | |
|
168 | 168 | def __call__(self): |
|
169 | 169 | """Starts an interactive debugger at the point where called. |
|
170 | 170 | |
|
171 | 171 | This is similar to the pdb.set_trace() function from the std lib, but |
|
172 | 172 | using IPython's enhanced debugger.""" |
|
173 | 173 | |
|
174 | 174 | self.debugger.set_trace(sys._getframe().f_back) |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+') |
|
178 | 178 | |
|
179 | 179 | |
|
180 | 180 | def strip_indentation(multiline_string): |
|
181 | 181 | return RGX_EXTRA_INDENT.sub('', multiline_string) |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): |
|
185 | 185 | """Make new_fn have old_fn's doc string. This is particularly useful |
|
186 | 186 | for the ``do_...`` commands that hook into the help system. |
|
187 | 187 | Adapted from a comp.lang.python posting |
|
188 | 188 | by Duncan Booth.""" |
|
189 | 189 | def wrapper(*args, **kw): |
|
190 | 190 | return new_fn(*args, **kw) |
|
191 | 191 | if old_fn.__doc__: |
|
192 | 192 | wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text |
|
193 | 193 | return wrapper |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | class Pdb(OldPdb): |
|
197 | 197 | """Modified Pdb class, does not load readline. |
|
198 | 198 | |
|
199 | 199 | for a standalone version that uses prompt_toolkit, see |
|
200 | 200 | `IPython.terminal.debugger.TerminalPdb` and |
|
201 | 201 | `IPython.terminal.debugger.set_trace()` |
|
202 | ||
|
203 | ||
|
204 | This debugger can hide and skip frames that are tagged according to some predicates. | |
|
205 | See the `skip_predicates` commands. | |
|
206 | ||
|
202 | 207 | """ |
|
203 | 208 | |
|
209 | default_predicates = {"tbhide": True, "readonly": False, "ipython_internal": True} | |
|
210 | ||
|
204 | 211 | def __init__(self, color_scheme=None, completekey=None, |
|
205 | 212 | stdin=None, stdout=None, context=5, **kwargs): |
|
206 | 213 | """Create a new IPython debugger. |
|
207 | 214 | |
|
208 | :param color_scheme: Deprecated, do not use. | |
|
209 | :param completekey: Passed to pdb.Pdb. | |
|
210 | :param stdin: Passed to pdb.Pdb. | |
|
211 | :param stdout: Passed to pdb.Pdb. | |
|
212 | :param context: Number of lines of source code context to show when | |
|
215 | Parameters | |
|
216 | ---------- | |
|
217 | color_scheme : default None | |
|
218 | Deprecated, do not use. | |
|
219 | completekey : default None | |
|
220 | Passed to pdb.Pdb. | |
|
221 | stdin : default None | |
|
222 | Passed to pdb.Pdb. | |
|
223 | stdout : default None | |
|
224 | Passed to pdb.Pdb. | |
|
225 | context : int | |
|
226 | Number of lines of source code context to show when | |
|
213 | 227 | displaying stacktrace information. |
|
214 | :param kwargs: Passed to pdb.Pdb. | |
|
228 | **kwargs | |
|
229 | Passed to pdb.Pdb. | |
|
230 | ||
|
231 | Notes | |
|
232 | ----- | |
|
215 | 233 |
|
|
216 | 234 |
|
|
217 | 235 | """ |
|
218 | 236 | |
|
219 | 237 | # Parent constructor: |
|
220 | 238 | try: |
|
221 | 239 | self.context = int(context) |
|
222 | 240 | if self.context <= 0: |
|
223 | 241 | raise ValueError("Context must be a positive integer") |
|
224 | 242 | except (TypeError, ValueError) as e: |
|
225 | 243 | raise ValueError("Context must be a positive integer") from e |
|
226 | 244 | |
|
227 | 245 | # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`. |
|
228 | 246 | OldPdb.__init__(self, completekey, stdin, stdout, **kwargs) |
|
229 | 247 | |
|
230 | 248 | # IPython changes... |
|
231 | 249 | self.shell = get_ipython() |
|
232 | 250 | |
|
233 | 251 | if self.shell is None: |
|
234 | 252 | save_main = sys.modules['__main__'] |
|
235 | 253 | # No IPython instance running, we must create one |
|
236 | 254 | from IPython.terminal.interactiveshell import \ |
|
237 | 255 | TerminalInteractiveShell |
|
238 | 256 | self.shell = TerminalInteractiveShell.instance() |
|
239 | 257 | # needed by any code which calls __import__("__main__") after |
|
240 | 258 | # the debugger was entered. See also #9941. |
|
241 | 259 | sys.modules["__main__"] = save_main |
|
242 | 260 | |
|
243 | 261 | if color_scheme is not None: |
|
244 | 262 | warnings.warn( |
|
245 | 263 | "The `color_scheme` argument is deprecated since version 5.1", |
|
246 | 264 | DeprecationWarning, stacklevel=2) |
|
247 | 265 | else: |
|
248 | 266 | color_scheme = self.shell.colors |
|
249 | 267 | |
|
250 | 268 | self.aliases = {} |
|
251 | 269 | |
|
252 | 270 | # Create color table: we copy the default one from the traceback |
|
253 | 271 | # module and add a few attributes needed for debugging |
|
254 | 272 | self.color_scheme_table = exception_colors() |
|
255 | 273 | |
|
256 | 274 | # shorthands |
|
257 | 275 | C = coloransi.TermColors |
|
258 | 276 | cst = self.color_scheme_table |
|
259 | 277 | |
|
260 | 278 | cst['NoColor'].colors.prompt = C.NoColor |
|
261 | 279 | cst['NoColor'].colors.breakpoint_enabled = C.NoColor |
|
262 | 280 | cst['NoColor'].colors.breakpoint_disabled = C.NoColor |
|
263 | 281 | |
|
264 | 282 | cst['Linux'].colors.prompt = C.Green |
|
265 | 283 | cst['Linux'].colors.breakpoint_enabled = C.LightRed |
|
266 | 284 | cst['Linux'].colors.breakpoint_disabled = C.Red |
|
267 | 285 | |
|
268 | 286 | cst['LightBG'].colors.prompt = C.Blue |
|
269 | 287 | cst['LightBG'].colors.breakpoint_enabled = C.LightRed |
|
270 | 288 | cst['LightBG'].colors.breakpoint_disabled = C.Red |
|
271 | 289 | |
|
272 | 290 | cst['Neutral'].colors.prompt = C.Blue |
|
273 | 291 | cst['Neutral'].colors.breakpoint_enabled = C.LightRed |
|
274 | 292 | cst['Neutral'].colors.breakpoint_disabled = C.Red |
|
275 | 293 | |
|
276 | 294 | # Add a python parser so we can syntax highlight source while |
|
277 | 295 | # debugging. |
|
278 | 296 | self.parser = PyColorize.Parser(style=color_scheme) |
|
279 | 297 | self.set_colors(color_scheme) |
|
280 | 298 | |
|
281 | 299 | # Set the prompt - the default prompt is '(Pdb)' |
|
282 | 300 | self.prompt = prompt |
|
283 | 301 | self.skip_hidden = True |
|
302 | self.report_skipped = True | |
|
303 | ||
|
304 | # list of predicates we use to skip frames | |
|
305 | self._predicates = self.default_predicates | |
|
284 | 306 | |
|
285 | 307 | def set_colors(self, scheme): |
|
286 | 308 | """Shorthand access to the color table scheme selector method.""" |
|
287 | 309 | self.color_scheme_table.set_active_scheme(scheme) |
|
288 | 310 | self.parser.style = scheme |
|
289 | 311 | |
|
290 | 312 | def set_trace(self, frame=None): |
|
291 | 313 | if frame is None: |
|
292 | 314 | frame = sys._getframe().f_back |
|
293 | 315 | self.initial_frame = frame |
|
294 | 316 | return super().set_trace(frame) |
|
295 | 317 | |
|
318 | def _hidden_predicate(self, frame): | |
|
319 | """ | |
|
320 | Given a frame return whether it it should be hidden or not by IPython. | |
|
321 | """ | |
|
322 | ||
|
323 | if self._predicates["readonly"]: | |
|
324 | fname = frame.f_code.co_filename | |
|
325 | # we need to check for file existence and interactively define | |
|
326 | # function would otherwise appear as RO. | |
|
327 | if os.path.isfile(fname) and not os.access(fname, os.W_OK): | |
|
328 | return True | |
|
329 | ||
|
330 | if self._predicates["tbhide"]: | |
|
331 | if frame in (self.curframe, getattr(self, "initial_frame", None)): | |
|
332 | return False | |
|
333 | else: | |
|
334 | return self._get_frame_locals(frame).get("__tracebackhide__", False) | |
|
335 | ||
|
336 | return False | |
|
337 | ||
|
296 | 338 | def hidden_frames(self, stack): |
|
297 | 339 | """ |
|
298 | 340 | Given an index in the stack return whether it should be skipped. |
|
299 | 341 | |
|
300 | 342 | This is used in up/down and where to skip frames. |
|
301 | 343 | """ |
|
302 | 344 | # The f_locals dictionary is updated from the actual frame |
|
303 | 345 | # locals whenever the .f_locals accessor is called, so we |
|
304 | 346 | # avoid calling it here to preserve self.curframe_locals. |
|
305 | 347 | # Futhermore, there is no good reason to hide the current frame. |
|
306 | ip_hide = [ | |
|
307 | False | |
|
308 | if s[0] in (self.curframe, getattr(self, "initial_frame", None)) | |
|
309 | else s[0].f_locals.get("__tracebackhide__", False) | |
|
310 | for s in stack | |
|
311 | ] | |
|
348 | ip_hide = [self._hidden_predicate(s[0]) for s in stack] | |
|
312 | 349 | ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"] |
|
313 | if ip_start: | |
|
350 | if ip_start and self._predicates["ipython_internal"]: | |
|
314 | 351 | ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)] |
|
315 | 352 | return ip_hide |
|
316 | 353 | |
|
317 | 354 | def interaction(self, frame, traceback): |
|
318 | 355 | try: |
|
319 | 356 | OldPdb.interaction(self, frame, traceback) |
|
320 | 357 | except KeyboardInterrupt: |
|
321 | 358 | self.stdout.write("\n" + self.shell.get_exception_only()) |
|
322 | 359 | |
|
323 | 360 | def precmd(self, line): |
|
324 | 361 | """Perform useful escapes on the command before it is executed.""" |
|
325 | 362 | |
|
326 | 363 | if line.endswith("??"): |
|
327 | 364 | line = "pinfo2 " + line[:-2] |
|
328 | 365 | elif line.endswith("?"): |
|
329 | 366 | line = "pinfo " + line[:-1] |
|
330 | 367 | |
|
331 | 368 | line = super().precmd(line) |
|
332 | 369 | |
|
333 | 370 | return line |
|
334 | 371 | |
|
335 | 372 | def new_do_frame(self, arg): |
|
336 | 373 | OldPdb.do_frame(self, arg) |
|
337 | 374 | |
|
338 | 375 | def new_do_quit(self, arg): |
|
339 | 376 | |
|
340 | 377 | if hasattr(self, 'old_all_completions'): |
|
341 | 378 | self.shell.Completer.all_completions = self.old_all_completions |
|
342 | 379 | |
|
343 | 380 | return OldPdb.do_quit(self, arg) |
|
344 | 381 | |
|
345 | 382 | do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit) |
|
346 | 383 | |
|
347 | 384 | def new_do_restart(self, arg): |
|
348 | 385 | """Restart command. In the context of ipython this is exactly the same |
|
349 | 386 | thing as 'quit'.""" |
|
350 | 387 | self.msg("Restart doesn't make sense here. Using 'quit' instead.") |
|
351 | 388 | return self.do_quit(arg) |
|
352 | 389 | |
|
353 | 390 | def print_stack_trace(self, context=None): |
|
354 | 391 | Colors = self.color_scheme_table.active_colors |
|
355 | 392 | ColorsNormal = Colors.Normal |
|
356 | 393 | if context is None: |
|
357 | 394 | context = self.context |
|
358 | 395 | try: |
|
359 | 396 | context = int(context) |
|
360 | 397 | if context <= 0: |
|
361 | 398 | raise ValueError("Context must be a positive integer") |
|
362 | 399 | except (TypeError, ValueError) as e: |
|
363 | 400 | raise ValueError("Context must be a positive integer") from e |
|
364 | 401 | try: |
|
365 | 402 | skipped = 0 |
|
366 | 403 | for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack): |
|
367 | 404 | if hidden and self.skip_hidden: |
|
368 | 405 | skipped += 1 |
|
369 | 406 | continue |
|
370 | 407 | if skipped: |
|
371 | 408 | print( |
|
372 | 409 | f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" |
|
373 | 410 | ) |
|
374 | 411 | skipped = 0 |
|
375 | 412 | self.print_stack_entry(frame_lineno, context=context) |
|
376 | 413 | if skipped: |
|
377 | 414 | print( |
|
378 | 415 | f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" |
|
379 | 416 | ) |
|
380 | 417 | except KeyboardInterrupt: |
|
381 | 418 | pass |
|
382 | 419 | |
|
383 | 420 | def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ', |
|
384 | 421 | context=None): |
|
385 | 422 | if context is None: |
|
386 | 423 | context = self.context |
|
387 | 424 | try: |
|
388 | 425 | context = int(context) |
|
389 | 426 | if context <= 0: |
|
390 | 427 | raise ValueError("Context must be a positive integer") |
|
391 | 428 | except (TypeError, ValueError) as e: |
|
392 | 429 | raise ValueError("Context must be a positive integer") from e |
|
393 | 430 | print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout) |
|
394 | 431 | |
|
395 | 432 | # vds: >> |
|
396 | 433 | frame, lineno = frame_lineno |
|
397 | 434 | filename = frame.f_code.co_filename |
|
398 | 435 | self.shell.hooks.synchronize_with_editor(filename, lineno, 0) |
|
399 | 436 | # vds: << |
|
400 | 437 | |
|
438 | def _get_frame_locals(self, frame): | |
|
439 | """ " | |
|
440 | Acessing f_local of current frame reset the namespace, so we want to avoid | |
|
441 | that or the following can happend | |
|
442 | ||
|
443 | ipdb> foo | |
|
444 | "old" | |
|
445 | ipdb> foo = "new" | |
|
446 | ipdb> foo | |
|
447 | "new" | |
|
448 | ipdb> where | |
|
449 | ipdb> foo | |
|
450 | "old" | |
|
451 | ||
|
452 | So if frame is self.current_frame we instead return self.curframe_locals | |
|
453 | ||
|
454 | """ | |
|
455 | if frame is self.curframe: | |
|
456 | return self.curframe_locals | |
|
457 | else: | |
|
458 | return frame.f_locals | |
|
459 | ||
|
401 | 460 | def format_stack_entry(self, frame_lineno, lprefix=': ', context=None): |
|
402 | 461 | if context is None: |
|
403 | 462 | context = self.context |
|
404 | 463 | try: |
|
405 | 464 | context = int(context) |
|
406 | 465 | if context <= 0: |
|
407 | 466 | print("Context must be a positive integer", file=self.stdout) |
|
408 | 467 | except (TypeError, ValueError): |
|
409 | 468 | print("Context must be a positive integer", file=self.stdout) |
|
410 | 469 | |
|
411 | 470 | import reprlib |
|
412 | 471 | |
|
413 | 472 | ret = [] |
|
414 | 473 | |
|
415 | 474 | Colors = self.color_scheme_table.active_colors |
|
416 | 475 | ColorsNormal = Colors.Normal |
|
417 | 476 | tpl_link = "%s%%s%s" % (Colors.filenameEm, ColorsNormal) |
|
418 | 477 | tpl_call = "%s%%s%s%%s%s" % (Colors.vName, Colors.valEm, ColorsNormal) |
|
419 | 478 | tpl_line = "%%s%s%%s %s%%s" % (Colors.lineno, ColorsNormal) |
|
420 | 479 | tpl_line_em = "%%s%s%%s %s%%s%s" % (Colors.linenoEm, Colors.line, ColorsNormal) |
|
421 | 480 | |
|
422 | 481 | frame, lineno = frame_lineno |
|
423 | 482 | |
|
424 | 483 | return_value = '' |
|
425 | if '__return__' in frame.f_locals: | |
|
426 | rv = frame.f_locals['__return__'] | |
|
484 | loc_frame = self._get_frame_locals(frame) | |
|
485 | if "__return__" in loc_frame: | |
|
486 | rv = loc_frame["__return__"] | |
|
427 | 487 | #return_value += '->' |
|
428 |
return_value += reprlib.repr(rv) + |
|
|
488 | return_value += reprlib.repr(rv) + "\n" | |
|
429 | 489 | ret.append(return_value) |
|
430 | 490 | |
|
431 | 491 | #s = filename + '(' + `lineno` + ')' |
|
432 | 492 | filename = self.canonic(frame.f_code.co_filename) |
|
433 | 493 | link = tpl_link % py3compat.cast_unicode(filename) |
|
434 | 494 | |
|
435 | 495 | if frame.f_code.co_name: |
|
436 | 496 | func = frame.f_code.co_name |
|
437 | 497 | else: |
|
438 | 498 | func = "<lambda>" |
|
439 | 499 | |
|
440 |
call = |
|
|
441 |
if func != |
|
|
442 |
if |
|
|
443 |
args = reprlib.repr(frame |
|
|
500 | call = "" | |
|
501 | if func != "?": | |
|
502 | if "__args__" in loc_frame: | |
|
503 | args = reprlib.repr(loc_frame["__args__"]) | |
|
444 | 504 | else: |
|
445 | 505 | args = '()' |
|
446 | 506 | call = tpl_call % (func, args) |
|
447 | 507 | |
|
448 | 508 | # The level info should be generated in the same format pdb uses, to |
|
449 | 509 | # avoid breaking the pdbtrack functionality of python-mode in *emacs. |
|
450 | 510 | if frame is self.curframe: |
|
451 | 511 | ret.append('> ') |
|
452 | 512 | else: |
|
453 | 513 | ret.append(" ") |
|
454 | 514 | ret.append("%s(%s)%s\n" % (link, lineno, call)) |
|
455 | 515 | |
|
456 | 516 | start = lineno - 1 - context//2 |
|
457 | 517 | lines = linecache.getlines(filename) |
|
458 | 518 | start = min(start, len(lines) - context) |
|
459 | 519 | start = max(start, 0) |
|
460 | 520 | lines = lines[start : start + context] |
|
461 | 521 | |
|
462 | 522 | for i, line in enumerate(lines): |
|
463 | 523 | show_arrow = start + 1 + i == lineno |
|
464 | 524 | linetpl = (frame is self.curframe or show_arrow) and tpl_line_em or tpl_line |
|
465 | 525 | ret.append( |
|
466 | 526 | self.__format_line( |
|
467 | 527 | linetpl, filename, start + 1 + i, line, arrow=show_arrow |
|
468 | 528 | ) |
|
469 | 529 | ) |
|
470 | 530 | return "".join(ret) |
|
471 | 531 | |
|
472 | 532 | def __format_line(self, tpl_line, filename, lineno, line, arrow=False): |
|
473 | 533 | bp_mark = "" |
|
474 | 534 | bp_mark_color = "" |
|
475 | 535 | |
|
476 | 536 | new_line, err = self.parser.format2(line, 'str') |
|
477 | 537 | if not err: |
|
478 | 538 | line = new_line |
|
479 | 539 | |
|
480 | 540 | bp = None |
|
481 | 541 | if lineno in self.get_file_breaks(filename): |
|
482 | 542 | bps = self.get_breaks(filename, lineno) |
|
483 | 543 | bp = bps[-1] |
|
484 | 544 | |
|
485 | 545 | if bp: |
|
486 | 546 | Colors = self.color_scheme_table.active_colors |
|
487 | 547 | bp_mark = str(bp.number) |
|
488 | 548 | bp_mark_color = Colors.breakpoint_enabled |
|
489 | 549 | if not bp.enabled: |
|
490 | 550 | bp_mark_color = Colors.breakpoint_disabled |
|
491 | 551 | |
|
492 | 552 | numbers_width = 7 |
|
493 | 553 | if arrow: |
|
494 | 554 | # This is the line with the error |
|
495 | 555 | pad = numbers_width - len(str(lineno)) - len(bp_mark) |
|
496 | 556 | num = '%s%s' % (make_arrow(pad), str(lineno)) |
|
497 | 557 | else: |
|
498 | 558 | num = '%*s' % (numbers_width - len(bp_mark), str(lineno)) |
|
499 | 559 | |
|
500 | 560 | return tpl_line % (bp_mark_color + bp_mark, num, line) |
|
501 | 561 | |
|
502 | 562 | def print_list_lines(self, filename, first, last): |
|
503 | 563 | """The printing (as opposed to the parsing part of a 'list' |
|
504 | 564 | command.""" |
|
505 | 565 | try: |
|
506 | 566 | Colors = self.color_scheme_table.active_colors |
|
507 | 567 | ColorsNormal = Colors.Normal |
|
508 | 568 | tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) |
|
509 | 569 | tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal) |
|
510 | 570 | src = [] |
|
511 | 571 | if filename == "<string>" and hasattr(self, "_exec_filename"): |
|
512 | 572 | filename = self._exec_filename |
|
513 | 573 | |
|
514 | 574 | for lineno in range(first, last+1): |
|
515 | 575 | line = linecache.getline(filename, lineno) |
|
516 | 576 | if not line: |
|
517 | 577 | break |
|
518 | 578 | |
|
519 | 579 | if lineno == self.curframe.f_lineno: |
|
520 | 580 | line = self.__format_line( |
|
521 | 581 | tpl_line_em, filename, lineno, line, arrow=True |
|
522 | 582 | ) |
|
523 | 583 | else: |
|
524 | 584 | line = self.__format_line( |
|
525 | 585 | tpl_line, filename, lineno, line, arrow=False |
|
526 | 586 | ) |
|
527 | 587 | |
|
528 | 588 | src.append(line) |
|
529 | 589 | self.lineno = lineno |
|
530 | 590 | |
|
531 | 591 | print(''.join(src), file=self.stdout) |
|
532 | 592 | |
|
533 | 593 | except KeyboardInterrupt: |
|
534 | 594 | pass |
|
535 | 595 | |
|
596 | def do_skip_predicates(self, args): | |
|
597 | """ | |
|
598 | Turn on/off individual predicates as to whether a frame should be hidden/skip. | |
|
599 | ||
|
600 | The global option to skip (or not) hidden frames is set with skip_hidden | |
|
601 | ||
|
602 | To change the value of a predicate | |
|
603 | ||
|
604 | skip_predicates key [true|false] | |
|
605 | ||
|
606 | Call without arguments to see the current values. | |
|
607 | ||
|
608 | To permanently change the value of an option add the corresponding | |
|
609 | command to your ``~/.pdbrc`` file. If you are programmatically using the | |
|
610 | Pdb instance you can also change the ``default_predicates`` class | |
|
611 | attribute. | |
|
612 | """ | |
|
613 | if not args.strip(): | |
|
614 | print("current predicates:") | |
|
615 | for (p, v) in self._predicates.items(): | |
|
616 | print(" ", p, ":", v) | |
|
617 | return | |
|
618 | type_value = args.strip().split(" ") | |
|
619 | if len(type_value) != 2: | |
|
620 | print( | |
|
621 | f"Usage: skip_predicates <type> <value>, with <type> one of {set(self._predicates.keys())}" | |
|
622 | ) | |
|
623 | return | |
|
624 | ||
|
625 | type_, value = type_value | |
|
626 | if type_ not in self._predicates: | |
|
627 | print(f"{type_!r} not in {set(self._predicates.keys())}") | |
|
628 | return | |
|
629 | if value.lower() not in ("true", "yes", "1", "no", "false", "0"): | |
|
630 | print( | |
|
631 | f"{value!r} is invalid - use one of ('true', 'yes', '1', 'no', 'false', '0')" | |
|
632 | ) | |
|
633 | return | |
|
634 | ||
|
635 | self._predicates[type_] = value.lower() in ("true", "yes", "1") | |
|
636 | if not any(self._predicates.values()): | |
|
637 | print( | |
|
638 | "Warning, all predicates set to False, skip_hidden may not have any effects." | |
|
639 | ) | |
|
640 | ||
|
536 | 641 | def do_skip_hidden(self, arg): |
|
537 | 642 | """ |
|
538 | 643 | Change whether or not we should skip frames with the |
|
539 | 644 | __tracebackhide__ attribute. |
|
540 | 645 | """ |
|
541 | if arg.strip().lower() in ("true", "yes"): | |
|
646 | if not arg.strip(): | |
|
647 | print( | |
|
648 | f"skip_hidden = {self.skip_hidden}, use 'yes','no', 'true', or 'false' to change." | |
|
649 | ) | |
|
650 | elif arg.strip().lower() in ("true", "yes"): | |
|
542 | 651 | self.skip_hidden = True |
|
543 | 652 | elif arg.strip().lower() in ("false", "no"): |
|
544 | 653 | self.skip_hidden = False |
|
654 | if not any(self._predicates.values()): | |
|
655 | print( | |
|
656 | "Warning, all predicates set to False, skip_hidden may not have any effects." | |
|
657 | ) | |
|
545 | 658 | |
|
546 | 659 | def do_list(self, arg): |
|
547 | 660 | """Print lines of code from the current stack frame |
|
548 | 661 | """ |
|
549 | 662 | self.lastcmd = 'list' |
|
550 | 663 | last = None |
|
551 | 664 | if arg: |
|
552 | 665 | try: |
|
553 | 666 | x = eval(arg, {}, {}) |
|
554 | 667 | if type(x) == type(()): |
|
555 | 668 | first, last = x |
|
556 | 669 | first = int(first) |
|
557 | 670 | last = int(last) |
|
558 | 671 | if last < first: |
|
559 | 672 | # Assume it's a count |
|
560 | 673 | last = first + last |
|
561 | 674 | else: |
|
562 | 675 | first = max(1, int(x) - 5) |
|
563 | 676 | except: |
|
564 | 677 | print('*** Error in argument:', repr(arg), file=self.stdout) |
|
565 | 678 | return |
|
566 | 679 | elif self.lineno is None: |
|
567 | 680 | first = max(1, self.curframe.f_lineno - 5) |
|
568 | 681 | else: |
|
569 | 682 | first = self.lineno + 1 |
|
570 | 683 | if last is None: |
|
571 | 684 | last = first + 10 |
|
572 | 685 | self.print_list_lines(self.curframe.f_code.co_filename, first, last) |
|
573 | 686 | |
|
574 | 687 | # vds: >> |
|
575 | 688 | lineno = first |
|
576 | 689 | filename = self.curframe.f_code.co_filename |
|
577 | 690 | self.shell.hooks.synchronize_with_editor(filename, lineno, 0) |
|
578 | 691 | # vds: << |
|
579 | 692 | |
|
580 | 693 | do_l = do_list |
|
581 | 694 | |
|
582 | 695 | def getsourcelines(self, obj): |
|
583 | 696 | lines, lineno = inspect.findsource(obj) |
|
584 |
if inspect.isframe(obj) and obj.f_globals is obj |
|
|
697 | if inspect.isframe(obj) and obj.f_globals is self._get_frame_locals(obj): | |
|
585 | 698 | # must be a module frame: do not try to cut a block out of it |
|
586 | 699 | return lines, 1 |
|
587 | 700 | elif inspect.ismodule(obj): |
|
588 | 701 | return lines, 1 |
|
589 | 702 | return inspect.getblock(lines[lineno:]), lineno+1 |
|
590 | 703 | |
|
591 | 704 | def do_longlist(self, arg): |
|
592 | 705 | """Print lines of code from the current stack frame. |
|
593 | 706 | |
|
594 | 707 | Shows more lines than 'list' does. |
|
595 | 708 | """ |
|
596 | 709 | self.lastcmd = 'longlist' |
|
597 | 710 | try: |
|
598 | 711 | lines, lineno = self.getsourcelines(self.curframe) |
|
599 | 712 | except OSError as err: |
|
600 | 713 | self.error(err) |
|
601 | 714 | return |
|
602 | 715 | last = lineno + len(lines) |
|
603 | 716 | self.print_list_lines(self.curframe.f_code.co_filename, lineno, last) |
|
604 | 717 | do_ll = do_longlist |
|
605 | 718 | |
|
606 | 719 | def do_debug(self, arg): |
|
607 | 720 | """debug code |
|
608 | 721 | Enter a recursive debugger that steps through the code |
|
609 | 722 | argument (which is an arbitrary expression or statement to be |
|
610 | 723 | executed in the current environment). |
|
611 | 724 | """ |
|
612 | 725 | trace_function = sys.gettrace() |
|
613 | 726 | sys.settrace(None) |
|
614 | 727 | globals = self.curframe.f_globals |
|
615 | 728 | locals = self.curframe_locals |
|
616 | 729 | p = self.__class__(completekey=self.completekey, |
|
617 | 730 | stdin=self.stdin, stdout=self.stdout) |
|
618 | 731 | p.use_rawinput = self.use_rawinput |
|
619 | 732 | p.prompt = "(%s) " % self.prompt.strip() |
|
620 | 733 | self.message("ENTERING RECURSIVE DEBUGGER") |
|
621 | 734 | sys.call_tracing(p.run, (arg, globals, locals)) |
|
622 | 735 | self.message("LEAVING RECURSIVE DEBUGGER") |
|
623 | 736 | sys.settrace(trace_function) |
|
624 | 737 | self.lastcmd = p.lastcmd |
|
625 | 738 | |
|
626 | 739 | def do_pdef(self, arg): |
|
627 | 740 | """Print the call signature for any callable object. |
|
628 | 741 | |
|
629 | 742 | The debugger interface to %pdef""" |
|
630 | 743 | namespaces = [ |
|
631 | 744 | ("Locals", self.curframe_locals), |
|
632 | 745 | ("Globals", self.curframe.f_globals), |
|
633 | 746 | ] |
|
634 | 747 | self.shell.find_line_magic("pdef")(arg, namespaces=namespaces) |
|
635 | 748 | |
|
636 | 749 | def do_pdoc(self, arg): |
|
637 | 750 | """Print the docstring for an object. |
|
638 | 751 | |
|
639 | 752 | The debugger interface to %pdoc.""" |
|
640 | 753 | namespaces = [ |
|
641 | 754 | ("Locals", self.curframe_locals), |
|
642 | 755 | ("Globals", self.curframe.f_globals), |
|
643 | 756 | ] |
|
644 | 757 | self.shell.find_line_magic("pdoc")(arg, namespaces=namespaces) |
|
645 | 758 | |
|
646 | 759 | def do_pfile(self, arg): |
|
647 | 760 | """Print (or run through pager) the file where an object is defined. |
|
648 | 761 | |
|
649 | 762 | The debugger interface to %pfile. |
|
650 | 763 | """ |
|
651 | 764 | namespaces = [ |
|
652 | 765 | ("Locals", self.curframe_locals), |
|
653 | 766 | ("Globals", self.curframe.f_globals), |
|
654 | 767 | ] |
|
655 | 768 | self.shell.find_line_magic("pfile")(arg, namespaces=namespaces) |
|
656 | 769 | |
|
657 | 770 | def do_pinfo(self, arg): |
|
658 | 771 | """Provide detailed information about an object. |
|
659 | 772 | |
|
660 | 773 | The debugger interface to %pinfo, i.e., obj?.""" |
|
661 | 774 | namespaces = [ |
|
662 | 775 | ("Locals", self.curframe_locals), |
|
663 | 776 | ("Globals", self.curframe.f_globals), |
|
664 | 777 | ] |
|
665 | 778 | self.shell.find_line_magic("pinfo")(arg, namespaces=namespaces) |
|
666 | 779 | |
|
667 | 780 | def do_pinfo2(self, arg): |
|
668 | 781 | """Provide extra detailed information about an object. |
|
669 | 782 | |
|
670 | 783 | The debugger interface to %pinfo2, i.e., obj??.""" |
|
671 | 784 | namespaces = [ |
|
672 | 785 | ("Locals", self.curframe_locals), |
|
673 | 786 | ("Globals", self.curframe.f_globals), |
|
674 | 787 | ] |
|
675 | 788 | self.shell.find_line_magic("pinfo2")(arg, namespaces=namespaces) |
|
676 | 789 | |
|
677 | 790 | def do_psource(self, arg): |
|
678 | 791 | """Print (or run through pager) the source code for an object.""" |
|
679 | 792 | namespaces = [ |
|
680 | 793 | ("Locals", self.curframe_locals), |
|
681 | 794 | ("Globals", self.curframe.f_globals), |
|
682 | 795 | ] |
|
683 | 796 | self.shell.find_line_magic("psource")(arg, namespaces=namespaces) |
|
684 | 797 | |
|
685 | 798 | def do_where(self, arg): |
|
686 | 799 | """w(here) |
|
687 | 800 | Print a stack trace, with the most recent frame at the bottom. |
|
688 | 801 | An arrow indicates the "current frame", which determines the |
|
689 | 802 | context of most commands. 'bt' is an alias for this command. |
|
690 | 803 | |
|
691 | 804 | Take a number as argument as an (optional) number of context line to |
|
692 | 805 | print""" |
|
693 | 806 | if arg: |
|
694 | 807 | try: |
|
695 | 808 | context = int(arg) |
|
696 | 809 | except ValueError as err: |
|
697 | 810 | self.error(err) |
|
698 | 811 | return |
|
699 | 812 | self.print_stack_trace(context) |
|
700 | 813 | else: |
|
701 | 814 | self.print_stack_trace() |
|
702 | 815 | |
|
703 | 816 | do_w = do_where |
|
704 | 817 | |
|
705 | 818 | def stop_here(self, frame): |
|
706 | 819 | hidden = False |
|
707 | 820 | if self.skip_hidden: |
|
708 | hidden = frame.f_locals.get("__tracebackhide__", False) | |
|
821 | hidden = self._hidden_predicate(frame) | |
|
709 | 822 | if hidden: |
|
823 | if self.report_skipped: | |
|
710 | 824 | Colors = self.color_scheme_table.active_colors |
|
711 | 825 | ColorsNormal = Colors.Normal |
|
712 | print(f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n") | |
|
713 | ||
|
826 | print( | |
|
827 | f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n" | |
|
828 | ) | |
|
714 | 829 | return super().stop_here(frame) |
|
715 | 830 | |
|
716 | 831 | def do_up(self, arg): |
|
717 | 832 | """u(p) [count] |
|
718 | 833 | Move the current frame count (default one) levels up in the |
|
719 | 834 | stack trace (to an older frame). |
|
720 | 835 | |
|
721 | 836 | Will skip hidden frames. |
|
722 | 837 | """ |
|
723 | 838 | # modified version of upstream that skips |
|
724 | # frames with __tracebackide__ | |
|
839 | # frames with __tracebackhide__ | |
|
725 | 840 | if self.curindex == 0: |
|
726 | 841 | self.error("Oldest frame") |
|
727 | 842 | return |
|
728 | 843 | try: |
|
729 | 844 | count = int(arg or 1) |
|
730 | 845 | except ValueError: |
|
731 | 846 | self.error("Invalid frame count (%s)" % arg) |
|
732 | 847 | return |
|
733 | 848 | skipped = 0 |
|
734 | 849 | if count < 0: |
|
735 | 850 | _newframe = 0 |
|
736 | 851 | else: |
|
737 | 852 | counter = 0 |
|
738 | 853 | hidden_frames = self.hidden_frames(self.stack) |
|
739 | 854 | for i in range(self.curindex - 1, -1, -1): |
|
740 | 855 | if hidden_frames[i] and self.skip_hidden: |
|
741 | 856 | skipped += 1 |
|
742 | 857 | continue |
|
743 | 858 | counter += 1 |
|
744 | 859 | if counter >= count: |
|
745 | 860 | break |
|
746 | 861 | else: |
|
747 | 862 | # if no break occured. |
|
748 | 863 | self.error( |
|
749 | 864 | "all frames above hidden, use `skip_hidden False` to get get into those." |
|
750 | 865 | ) |
|
751 | 866 | return |
|
752 | 867 | |
|
753 | 868 | Colors = self.color_scheme_table.active_colors |
|
754 | 869 | ColorsNormal = Colors.Normal |
|
755 | 870 | _newframe = i |
|
756 | 871 | self._select_frame(_newframe) |
|
757 | 872 | if skipped: |
|
758 | 873 | print( |
|
759 | 874 | f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" |
|
760 | 875 | ) |
|
761 | 876 | |
|
762 | 877 | def do_down(self, arg): |
|
763 | 878 | """d(own) [count] |
|
764 | 879 | Move the current frame count (default one) levels down in the |
|
765 | 880 | stack trace (to a newer frame). |
|
766 | 881 | |
|
767 | 882 | Will skip hidden frames. |
|
768 | 883 | """ |
|
769 | 884 | if self.curindex + 1 == len(self.stack): |
|
770 | 885 | self.error("Newest frame") |
|
771 | 886 | return |
|
772 | 887 | try: |
|
773 | 888 | count = int(arg or 1) |
|
774 | 889 | except ValueError: |
|
775 | 890 | self.error("Invalid frame count (%s)" % arg) |
|
776 | 891 | return |
|
777 | 892 | if count < 0: |
|
778 | 893 | _newframe = len(self.stack) - 1 |
|
779 | 894 | else: |
|
780 | 895 | counter = 0 |
|
781 | 896 | skipped = 0 |
|
782 | 897 | hidden_frames = self.hidden_frames(self.stack) |
|
783 | 898 | for i in range(self.curindex + 1, len(self.stack)): |
|
784 | 899 | if hidden_frames[i] and self.skip_hidden: |
|
785 | 900 | skipped += 1 |
|
786 | 901 | continue |
|
787 | 902 | counter += 1 |
|
788 | 903 | if counter >= count: |
|
789 | 904 | break |
|
790 | 905 | else: |
|
791 | 906 | self.error( |
|
792 | 907 | "all frames bellow hidden, use `skip_hidden False` to get get into those." |
|
793 | 908 | ) |
|
794 | 909 | return |
|
795 | 910 | |
|
796 | 911 | Colors = self.color_scheme_table.active_colors |
|
797 | 912 | ColorsNormal = Colors.Normal |
|
798 | 913 | if skipped: |
|
799 | 914 | print( |
|
800 | 915 | f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" |
|
801 | 916 | ) |
|
802 | 917 | _newframe = i |
|
803 | 918 | |
|
804 | 919 | self._select_frame(_newframe) |
|
805 | 920 | |
|
806 | 921 | do_d = do_down |
|
807 | 922 | do_u = do_up |
|
808 | 923 | |
|
809 | 924 | def do_context(self, context): |
|
810 | 925 | """context number_of_lines |
|
811 | 926 | Set the number of lines of source code to show when displaying |
|
812 | 927 | stacktrace information. |
|
813 | 928 | """ |
|
814 | 929 | try: |
|
815 | 930 | new_context = int(context) |
|
816 | 931 | if new_context <= 0: |
|
817 | 932 | raise ValueError() |
|
818 | 933 | self.context = new_context |
|
819 | 934 | except ValueError: |
|
820 | 935 | self.error("The 'context' command requires a positive integer argument.") |
|
821 | 936 | |
|
822 | 937 | |
|
823 | 938 | class InterruptiblePdb(Pdb): |
|
824 | 939 | """Version of debugger where KeyboardInterrupt exits the debugger altogether.""" |
|
825 | 940 | |
|
826 | 941 | def cmdloop(self): |
|
827 | 942 | """Wrap cmdloop() such that KeyboardInterrupt stops the debugger.""" |
|
828 | 943 | try: |
|
829 | 944 | return OldPdb.cmdloop(self) |
|
830 | 945 | except KeyboardInterrupt: |
|
831 | 946 | self.stop_here = lambda frame: False |
|
832 | 947 | self.do_quit("") |
|
833 | 948 | sys.settrace(None) |
|
834 | 949 | self.quitting = False |
|
835 | 950 | raise |
|
836 | 951 | |
|
837 | 952 | def _cmdloop(self): |
|
838 | 953 | while True: |
|
839 | 954 | try: |
|
840 | 955 | # keyboard interrupts allow for an easy way to cancel |
|
841 | 956 | # the current command, so allow them during interactive input |
|
842 | 957 | self.allow_kbdint = True |
|
843 | 958 | self.cmdloop() |
|
844 | 959 | self.allow_kbdint = False |
|
845 | 960 | break |
|
846 | 961 | except KeyboardInterrupt: |
|
847 | 962 | self.message('--KeyboardInterrupt--') |
|
848 | 963 | raise |
|
849 | 964 | |
|
850 | 965 | |
|
851 | 966 | def set_trace(frame=None): |
|
852 | 967 | """ |
|
853 | 968 | Start debugging from `frame`. |
|
854 | 969 | |
|
855 | 970 | If frame is not specified, debugging starts from caller's frame. |
|
856 | 971 | """ |
|
857 | 972 | Pdb().set_trace(frame or sys._getframe().f_back) |
@@ -1,1234 +1,1256 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Top-level display functions for displaying object in different formats.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | from binascii import b2a_base64, hexlify |
|
9 | 9 | import html |
|
10 | 10 | import json |
|
11 | 11 | import mimetypes |
|
12 | 12 | import os |
|
13 | 13 | import struct |
|
14 | 14 | import warnings |
|
15 | 15 | from copy import deepcopy |
|
16 | 16 | from os.path import splitext |
|
17 | 17 | from pathlib import Path, PurePath |
|
18 | 18 | |
|
19 | 19 | from IPython.utils.py3compat import cast_unicode |
|
20 | 20 | from IPython.testing.skipdoctest import skip_doctest |
|
21 | 21 | from . import display_functions |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | __all__ = ['display_pretty', 'display_html', 'display_markdown', |
|
25 | 25 | 'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', |
|
26 | 26 | 'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', |
|
27 | 27 | 'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', |
|
28 | 28 | 'GeoJSON', 'Javascript', 'Image', 'set_matplotlib_formats', |
|
29 | 29 | 'set_matplotlib_close', |
|
30 | 30 | 'Video'] |
|
31 | 31 | |
|
32 | 32 | _deprecated_names = ["display", "clear_output", "publish_display_data", "update_display", "DisplayHandle"] |
|
33 | 33 | |
|
34 | 34 | __all__ = __all__ + _deprecated_names |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | # ----- warn to import from IPython.display ----- |
|
38 | 38 | |
|
39 | 39 | from warnings import warn |
|
40 | 40 | |
|
41 | 41 | |
|
def __getattr__(name):
    """Module-level attribute hook: forward deprecated names with a warning."""
    if name in _deprecated_names:
        # These live in IPython.display now; keep working but nudge callers.
        warn(
            f"Importing {name} from IPython.core.display is deprecated since IPython 7.14, please import from IPython display",
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(display_functions, name)

    if name in globals():
        return globals()[name]
    raise AttributeError(f"module {__name__} has no attribute {name}")
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | #----------------------------------------------------------------------------- |
|
54 | 54 | # utility functions |
|
55 | 55 | #----------------------------------------------------------------------------- |
|
56 | 56 | |
|
57 | 57 | def _safe_exists(path): |
|
58 | 58 | """Check path, but don't let exceptions raise""" |
|
59 | 59 | try: |
|
60 | 60 | return os.path.exists(path) |
|
61 | 61 | except Exception: |
|
62 | 62 | return False |
|
63 | 63 | |
|
64 | 64 | |
|
def _display_mimetype(mimetype, objs, raw=False, metadata=None):
    """Shared implementation behind all the display_* helper functions.

    Parameters
    ----------
    mimetype : str
        The mimetype to be published (e.g. 'image/png')
    objs : sequence of object
        The Python objects to display, or if raw=True raw data of the
        given mimetype.
    raw : bool
        Are the data objects raw data or Python objects that need to be
        formatted before display? [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    if metadata:
        metadata = {mimetype: metadata}
    if raw:
        # Wrap each raw payload in a single-mimetype bundle: {mimetype: data}.
        objs = [{mimetype: obj} for obj in objs]
    display(*objs, raw=raw, metadata=metadata, include=[mimetype])
|
87 | 87 | |
|
88 | 88 | #----------------------------------------------------------------------------- |
|
89 | 89 | # Main functions |
|
90 | 90 | #----------------------------------------------------------------------------- |
|
91 | 91 | |
|
92 | 92 | |
|
def display_pretty(*objs, **kwargs):
    """Display the pretty (default, text/plain) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw text data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('text/plain', objs, **kwargs)
|
108 | 108 | |
|
109 | 109 | |
|
def display_html(*objs, **kwargs):
    """Display the HTML (text/html) representation of objects.

    Note: if ``raw=False`` and an object has no HTML representation,
    nothing is shown for it.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw HTML data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('text/html', objs, **kwargs)
|
128 | 128 | |
|
129 | 129 | |
|
def display_markdown(*objs, **kwargs):
    """Display the Markdown (text/markdown) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw markdown data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('text/markdown', objs, **kwargs)
|
146 | 146 | |
|
147 | 147 | |
|
def display_svg(*objs, **kwargs):
    """Display the SVG (image/svg+xml) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw SVG data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('image/svg+xml', objs, **kwargs)
|
163 | 163 | |
|
164 | 164 | |
|
def display_png(*objs, **kwargs):
    """Display the PNG (image/png) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw PNG data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('image/png', objs, **kwargs)
|
180 | 180 | |
|
181 | 181 | |
|
def display_jpeg(*objs, **kwargs):
    """Display the JPEG (image/jpeg) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw JPEG data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('image/jpeg', objs, **kwargs)
|
197 | 197 | |
|
198 | 198 | |
|
def display_latex(*objs, **kwargs):
    """Display the LaTeX (text/latex) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw LaTeX data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('text/latex', objs, **kwargs)
|
214 | 214 | |
|
215 | 215 | |
|
def display_json(*objs, **kwargs):
    """Display the JSON (application/json) representation of objects.

    Note that not many frontends support displaying JSON.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw JSON data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('application/json', objs, **kwargs)
|
233 | 233 | |
|
234 | 234 | |
|
def display_javascript(*objs, **kwargs):
    """Display the Javascript (application/javascript) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw javascript data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('application/javascript', objs, **kwargs)
|
250 | 250 | |
|
251 | 251 | |
|
def display_pdf(*objs, **kwargs):
    """Display the PDF (application/pdf) representation of objects.

    Parameters
    ----------
    *objs : object
        Objects to display, or raw PDF data when ``raw=True``.
    raw : bool
        Whether *objs* holds raw mime data instead of Python objects that
        must first be formatted. [default: False]
    metadata : dict (optional)
        Metadata to be associated with the specific mimetype output.
    """
    _display_mimetype('application/pdf', objs, **kwargs)
|
267 | 267 | |
|
268 | 268 | |
|
269 | 269 | #----------------------------------------------------------------------------- |
|
270 | 270 | # Smart classes |
|
271 | 271 | #----------------------------------------------------------------------------- |
|
272 | 272 | |
|
273 | 273 | |
|
class DisplayObject(object):
    """An object that wraps data to be displayed."""

    _read_flags = 'r'       # file mode used by reload() when reading `filename`
    _show_mem_addr = False  # include the memory address in repr() when True
    metadata = None

    def __init__(self, data=None, url=None, filename=None, metadata=None):
        """Create a display object given raw data.

        When this object is returned by an expression or passed to the
        display function, it will result in the data being displayed
        in the frontend. The MIME type of the data should match the
        subclasses used, so the Png subclass should be used for 'image/png'
        data. If the data is a URL, the data will first be downloaded
        and then displayed.

        Parameters
        ----------
        data : unicode, str or bytes
            The raw data, or a URL or file path to load the data from.
        url : unicode
            A URL to download the data from.
        filename : unicode
            Path to a local file to load the data from.
        metadata : dict
            Dict of metadata associated to be the object when displayed
        """
        if isinstance(data, (Path, PurePath)):
            data = str(data)

        # A string `data` may actually be a URL or a local path; reroute it
        # to the matching attribute in those cases.
        if data is not None and isinstance(data, str):
            if data.startswith('http') and url is None:
                url = data
                filename = None
                data = None
            elif _safe_exists(data) and filename is None:
                url = None
                filename = data
                data = None

        self.url = url
        self.filename = filename
        # url and filename must be assigned before data, because subclasses
        # define @data.setter methods that can consult them.
        self.data = data

        if metadata is not None:
            self.metadata = metadata
        elif self.metadata is None:
            self.metadata = {}

        self.reload()
        self._check_data()

    def __repr__(self):
        if self._show_mem_addr:
            return super(DisplayObject, self).__repr__()
        cls = self.__class__
        return "<%s.%s object>" % (cls.__module__, cls.__name__)

    def _check_data(self):
        """Override in subclasses if there's something to check."""
        pass

    def _data_and_metadata(self):
        """Return data, or (data, metadata copy) when metadata is non-empty."""
        if self.metadata:
            return self.data, deepcopy(self.metadata)
        return self.data

    def reload(self):
        """Reload the raw data from file or URL."""
        if self.filename is not None:
            with open(self.filename, self._read_flags) as f:
                self.data = f.read()
        elif self.url is not None:
            # Deferred import keeps module import time down.
            from urllib.request import urlopen
            resp = urlopen(self.url)
            data = resp.read()
            # Extract the charset from the content-type header, if present.
            encoding = None
            if 'content-type' in resp.headers:
                for part in resp.headers['content-type'].split(';'):
                    part = part.strip()
                    if part.startswith('charset'):
                        encoding = part.split('=')[-1].strip()
                        break
            if 'content-encoding' in resp.headers:
                # TODO: do deflate?
                if 'gzip' in resp.headers['content-encoding']:
                    import gzip
                    from io import BytesIO
                    # gzip.open in text mode already decodes, so clear
                    # `encoding` to avoid decoding a second time below.
                    with gzip.open(BytesIO(data), 'rt', encoding=encoding) as fp:
                        encoding = None
                        data = fp.read()

            # Touch self.data only once: subclasses such as SVG have
            # @data.setter methods that transform the assigned value.
            if encoding:
                self.data = data.decode(encoding, 'replace')
            else:
                self.data = data
|
384 | 384 | |
|
385 | 385 | |
|
class TextDisplayObject(DisplayObject):
    """Validate that display data is text"""
    def _check_data(self):
        # None is allowed: data may arrive later via url/filename.
        if self.data is not None and not isinstance(self.data, str):
            raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
|
391 | 391 | |
|
class Pretty(TextDisplayObject):
    """Display object for pre-formatted plain text."""

    def _repr_pretty_(self, pp, cycle):
        # Hand the stored text straight to the pretty-printer.
        return pp.text(self.data)
|
396 | 396 | |
|
397 | 397 | |
|
class HTML(TextDisplayObject):
    """Display object for raw HTML data."""

    def __init__(self, data=None, url=None, filename=None, metadata=None):
        def _looks_like_iframe():
            if not data:
                return False
            # Only lower-case the beginning and end, since `data` could be
            # a very long string.
            head = data[:10].lower()
            tail = data[-10:].lower()
            return head.startswith("<iframe ") and tail.endswith("</iframe>")

        if _looks_like_iframe():
            warnings.warn("Consider using IPython.display.IFrame instead")
        super(HTML, self).__init__(data=data, url=url, filename=filename, metadata=metadata)

    def _repr_html_(self):
        return self._data_and_metadata()

    def __html__(self):
        """
        This method exists to inform other HTML-using modules (e.g. Markupsafe,
        htmltag, etc) that this object is HTML and does not need things like
        special characters (<>&) escaped.
        """
        return self._repr_html_()
|
427 | 427 | |
|
428 | 428 | |
|
class Markdown(TextDisplayObject):
    """Display object for Markdown-formatted text."""

    def _repr_markdown_(self):
        return self._data_and_metadata()
|
433 | 433 | |
|
434 | 434 | |
|
class Math(TextDisplayObject):
    """Render the stored text as display-mode LaTeX math."""

    def _repr_latex_(self):
        # Strip any user-supplied dollar signs, then wrap in a single
        # displaystyle math environment.
        s = r"$\displaystyle %s$" % self.data.strip('$')
        if self.metadata:
            return s, deepcopy(self.metadata)
        return s
|
443 | 443 | |
|
444 | 444 | |
|
class Latex(TextDisplayObject):
    """Display object for raw LaTeX source text."""

    def _repr_latex_(self):
        return self._data_and_metadata()
|
449 | 449 | |
|
450 | 450 | |
|
class SVG(DisplayObject):
    """Embed an SVG into the display.

    Note if you just want to view a svg image via a URL use `:class:Image` with
    a url=URL keyword argument.
    """

    _read_flags = 'rb'
    # `data` is a property: the setter extracts the <svg> tag, discarding
    # any surrounding document headers.
    _data = None

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, svg):
        if svg is None:
            self._data = None
            return
        # Parse into a DOM object and keep only the first <svg> element.
        from xml.dom import minidom
        doc = minidom.parseString(svg)
        tags = doc.getElementsByTagName('svg')
        if tags:
            svg = tags[0].toxml()
        # else: fall back on the input and trust the caller, though this is
        # probably an error.
        self._data = cast_unicode(svg)

    def _repr_svg_(self):
        return self._data_and_metadata()
|
488 | 488 | |
|
class ProgressBar(DisplayObject):
    """Progressbar supports displaying a progressbar like element
    """
    def __init__(self, total):
        """Creates a new progressbar

        Parameters
        ----------
        total : int
            maximum size of the progressbar
        """
        self.total = total
        self._progress = 0
        self.html_width = '60ex'  # CSS width of the HTML <progress> element
        self.text_width = 60      # character width of the text fallback bar
        # Stable display id so update() replaces the same output area.
        self._display_id = hexlify(os.urandom(8)).decode('ascii')

    def __repr__(self):
        # Text fallback: fixed-width bar of '=' characters.
        done = '=' * int(self.progress / self.total * self.text_width)
        todo = ' ' * (self.text_width - len(done))
        return '[{}{}] {}/{}'.format(done, todo, self.progress, self.total)

    def _repr_html_(self):
        return "<progress style='width:{}' max='{}' value='{}'></progress>".format(
            self.html_width, self.total, self.progress)

    def display(self):
        """Show the bar in the frontend under its stable display id."""
        display(self, display_id=self._display_id)

    def update(self):
        """Re-render the existing bar output in place."""
        display(self, display_id=self._display_id, update=True)

    @property
    def progress(self):
        return self._progress

    @progress.setter
    def progress(self, value):
        # Every assignment refreshes the frontend display.
        self._progress = value
        self.update()

    def __iter__(self):
        self.display()
        self._progress = -1  # First iteration is 0
        return self

    def __next__(self):
        """Returns current value and increments display by one."""
        self.progress += 1
        if self.progress < self.total:
            return self.progress
        raise StopIteration()
|
546 | 546 | |
|
class JSON(DisplayObject):
    """JSON expects a JSON-able dict or list

    not an already-serialized JSON string.

    Scalar types (None, number, string) are not allowed, only dict or list containers.
    """
    # `data` is a property so we can warn about already-serialized JSON input.
    _data = None

    def __init__(self, data=None, url=None, filename=None, expanded=False, metadata=None, root='root', **kwargs):
        """Create a JSON display object given raw data.

        Parameters
        ----------
        data : dict or list
            JSON data to display. Not an already-serialized JSON string.
            Scalar types (None, number, string) are not allowed, only dict
            or list containers.
        url : unicode
            A URL to download the data from.
        filename : unicode
            Path to a local file to load the data from.
        expanded : boolean
            Metadata to control whether a JSON display component is expanded.
        metadata : dict
            Specify extra metadata to attach to the json display object.
        root : str
            The name of the root element of the JSON tree
        """
        self.metadata = {
            'expanded': expanded,
            'root': root,
        }
        if metadata:
            self.metadata.update(metadata)
        if kwargs:
            self.metadata.update(kwargs)
        super(JSON, self).__init__(data=data, url=url, filename=filename)

    def _check_data(self):
        if self.data is not None and not isinstance(self.data, (dict, list)):
            raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data))

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        if isinstance(data, (Path, PurePath)):
            data = str(data)

        if isinstance(data, str):
            # Strings are parsed for backward compatibility, but warn only
            # when passed directly (not when loaded from a file or URL).
            if self.filename is None and self.url is None:
                warnings.warn("JSON expects JSONable dict or list, not JSON strings")
            data = json.loads(data)
        self._data = data

    def _data_and_metadata(self):
        return self.data, self.metadata

    def _repr_json_(self):
        return self._data_and_metadata()
|
610 | 610 | |
|
611 | 611 | _css_t = """var link = document.createElement("link"); |
|
612 | 612 | link.ref = "stylesheet"; |
|
613 | 613 | link.type = "text/css"; |
|
614 | 614 | link.href = "%s"; |
|
615 | 615 | document.head.appendChild(link); |
|
616 | 616 | """ |
|
617 | 617 | |
|
618 | 618 | _lib_t1 = """new Promise(function(resolve, reject) { |
|
619 | 619 | var script = document.createElement("script"); |
|
620 | 620 | script.onload = resolve; |
|
621 | 621 | script.onerror = reject; |
|
622 | 622 | script.src = "%s"; |
|
623 | 623 | document.head.appendChild(script); |
|
624 | 624 | }).then(() => { |
|
625 | 625 | """ |
|
626 | 626 | |
|
627 | 627 | _lib_t2 = """ |
|
628 | 628 | });""" |
|
629 | 629 | |
|
class GeoJSON(JSON):
    """GeoJSON expects JSON-able dict

    not an already-serialized JSON string.

    Scalar types (None, number, string) are not allowed, only dict containers.
    """

    def __init__(self, *args, **kwargs):
        """Create a GeoJSON display object given raw data.

        Parameters
        ----------
        data : dict or list
            GeoJSON data. Not an already-serialized JSON string.
            Scalar types (None, number, string) are not allowed, only dict
            or list containers.
        url_template : string
            Leaflet TileLayer URL template: http://leafletjs.com/reference.html#url-template
        layer_options : dict
            Leaflet TileLayer options: http://leafletjs.com/reference.html#tilelayer-options
        url : unicode
            A URL to download the data from.
        filename : unicode
            Path to a local file to load the data from.
        metadata : dict
            Specify extra metadata to attach to the json display object.

        Examples
        --------
        The following will display an interactive map of Mars with a point of
        interest on frontend that do support GeoJSON display.

        >>> from IPython.display import GeoJSON

        >>> GeoJSON(data={
        ...     "type": "Feature",
        ...     "geometry": {
        ...         "type": "Point",
        ...         "coordinates": [-81.327, 296.038]
        ...     }
        ... },
        ... url_template="http://s3-eu-west-1.amazonaws.com/whereonmars.cartodb.net/{basemap_id}/{z}/{x}/{y}.png",
        ... layer_options={
        ...     "basemap_id": "celestia_mars-shaded-16k_global",
        ...     "attribution" : "Celestia/praesepe",
        ...     "minZoom" : 0,
        ...     "maxZoom" : 18,
        ... })
        <IPython.core.display.GeoJSON object>

        In the terminal IPython, you will only see the text representation of
        the GeoJSON object.

        """
        # All validation and metadata handling is inherited from JSON.
        super(GeoJSON, self).__init__(*args, **kwargs)

    def _ipython_display_(self):
        # Publish both a geo+json payload and a plain-text fallback.
        bundle = {
            'application/geo+json': self.data,
            'text/plain': '<IPython.display.GeoJSON object>'
        }
        metadata = {
            'application/geo+json': self.metadata
        }
        display(bundle, metadata=metadata, raw=True)
|
698 | 698 | |
|
class Javascript(TextDisplayObject):

    def __init__(self, data=None, url=None, filename=None, lib=None, css=None):
        """Create a Javascript display object given raw data.

        When this object is returned by an expression or passed to the
        display function, it will result in the data being displayed
        in the frontend. If the data is a URL, the data will first be
        downloaded and then displayed.

        In the Notebook, the containing element will be available as `element`,
        and jQuery will be available. Content appended to `element` will be
        visible in the output area.

        Parameters
        ----------
        data : unicode, str or bytes
            The Javascript source code or a URL to download it from.
        url : unicode
            A URL to download the data from.
        filename : unicode
            Path to a local file to load the data from.
        lib : list or str
            A sequence of Javascript library URLs to load asynchronously before
            running the source code. The full URLs of the libraries should
            be given. A single Javascript library URL can also be given as a
            string.
        css : list or str
            A sequence of css files to load before running the source code.
            The full URLs of the css files should be given. A single css URL
            can also be given as a string.
        """
        # Normalize both arguments: a single URL becomes a one-element list,
        # None becomes an empty list.
        if isinstance(lib, str):
            lib = [lib]
        elif lib is None:
            lib = []
        if isinstance(css, str):
            css = [css]
        elif css is None:
            css = []
        if not isinstance(lib, (list, tuple)):
            raise TypeError('expected sequence, got: %r' % lib)
        if not isinstance(css, (list, tuple)):
            raise TypeError('expected sequence, got: %r' % css)
        self.lib = lib
        self.css = css
        super(Javascript, self).__init__(data=data, url=url, filename=filename)

    def _repr_javascript_(self):
        # CSS loaders first, then one async library loader per lib URL,
        # the user code inside the innermost .then(), and finally one
        # closing template per opened loader.
        out = ''
        for css_url in self.css:
            out += _css_t % css_url
        for lib_url in self.lib:
            out += _lib_t1 % lib_url
        out += self.data
        out += _lib_t2 * len(self.lib)
        return out
|
756 | 756 | |
|
757 | 757 | # constants for identifying png/jpeg data |
|
758 | 758 | _PNG = b'\x89PNG\r\n\x1a\n' |
|
759 | 759 | _JPEG = b'\xff\xd8' |
|
760 | 760 | |
|
761 | 761 | def _pngxy(data): |
|
762 | 762 | """read the (width, height) from a PNG header""" |
|
763 | 763 | ihdr = data.index(b'IHDR') |
|
764 | 764 | # next 8 bytes are width/height |
|
765 | 765 | return struct.unpack('>ii', data[ihdr+4:ihdr+12]) |
|
766 | 766 | |
|
767 | 767 | def _jpegxy(data): |
|
768 | 768 | """read the (width, height) from a JPEG header""" |
|
769 | 769 | # adapted from http://www.64lines.com/jpeg-width-height |
|
770 | 770 | |
|
771 | 771 | idx = 4 |
|
772 | 772 | while True: |
|
773 | 773 | block_size = struct.unpack('>H', data[idx:idx+2])[0] |
|
774 | 774 | idx = idx + block_size |
|
775 | 775 | if data[idx:idx+2] == b'\xFF\xC0': |
|
776 | 776 | # found Start of Frame |
|
777 | 777 | iSOF = idx |
|
778 | 778 | break |
|
779 | 779 | else: |
|
780 | 780 | # read another block |
|
781 | 781 | idx += 2 |
|
782 | 782 | |
|
783 | 783 | h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9]) |
|
784 | 784 | return w, h |
|
785 | 785 | |
|
786 | 786 | def _gifxy(data): |
|
787 | 787 | """read the (width, height) from a GIF header""" |
|
788 | 788 | return struct.unpack('<HH', data[6:10]) |
|
789 | 789 | |
|
790 | 790 | |
|
791 | 791 | class Image(DisplayObject): |
|
792 | 792 | |
|
793 | 793 | _read_flags = 'rb' |
|
794 | 794 | _FMT_JPEG = u'jpeg' |
|
795 | 795 | _FMT_PNG = u'png' |
|
796 | 796 | _FMT_GIF = u'gif' |
|
797 | 797 | _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG, _FMT_GIF] |
|
798 | 798 | _MIMETYPES = { |
|
799 | 799 | _FMT_PNG: 'image/png', |
|
800 | 800 | _FMT_JPEG: 'image/jpeg', |
|
801 | 801 | _FMT_GIF: 'image/gif', |
|
802 | 802 | } |
|
803 | 803 | |
|
804 | 804 | def __init__( |
|
805 | 805 | self, |
|
806 | 806 | data=None, |
|
807 | 807 | url=None, |
|
808 | 808 | filename=None, |
|
809 | 809 | format=None, |
|
810 | 810 | embed=None, |
|
811 | 811 | width=None, |
|
812 | 812 | height=None, |
|
813 | 813 | retina=False, |
|
814 | 814 | unconfined=False, |
|
815 | 815 | metadata=None, |
|
816 | 816 | alt=None, |
|
817 | 817 | ): |
|
818 | 818 | """Create a PNG/JPEG/GIF image object given raw data. |
|
819 | 819 | |
|
820 | 820 | When this object is returned by an input cell or passed to the |
|
821 | 821 | display function, it will result in the image being displayed |
|
822 | 822 | in the frontend. |
|
823 | 823 | |
|
824 | 824 | Parameters |
|
825 | 825 | ---------- |
|
826 | 826 | data : unicode, str or bytes |
|
827 | 827 | The raw image data or a URL or filename to load the data from. |
|
828 | 828 | This always results in embedded image data. |
|
829 | 829 | url : unicode |
|
830 | 830 | A URL to download the data from. If you specify `url=`, |
|
831 | 831 | the image data will not be embedded unless you also specify `embed=True`. |
|
832 | 832 | filename : unicode |
|
833 | 833 | Path to a local file to load the data from. |
|
834 | 834 | Images from a file are always embedded. |
|
835 | 835 | format : unicode |
|
836 | 836 | The format of the image data (png/jpeg/jpg/gif). If a filename or URL is given |
|
837 | 837 | for format will be inferred from the filename extension. |
|
838 | 838 | embed : bool |
|
839 | 839 | Should the image data be embedded using a data URI (True) or be |
|
840 | 840 | loaded using an <img> tag. Set this to True if you want the image |
|
841 | 841 | to be viewable later with no internet connection in the notebook. |
|
842 | 842 | |
|
843 | 843 | Default is `True`, unless the keyword argument `url` is set, then |
|
844 | 844 | default value is `False`. |
|
845 | 845 | |
|
846 | 846 | Note that QtConsole is not able to display images if `embed` is set to `False` |
|
847 | 847 | width : int |
|
848 | 848 | Width in pixels to which to constrain the image in html |
|
849 | 849 | height : int |
|
850 | 850 | Height in pixels to which to constrain the image in html |
|
851 | 851 | retina : bool |
|
852 | 852 | Automatically set the width and height to half of the measured |
|
853 | 853 | width and height. |
|
854 | 854 | This only works for embedded images because it reads the width/height |
|
855 | 855 | from image data. |
|
856 | 856 | For non-embedded images, you can just set the desired display width |
|
857 | 857 | and height directly. |
|
858 | 858 | unconfined : bool |
|
859 | 859 | Set unconfined=True to disable max-width confinement of the image. |
|
860 | 860 | metadata : dict |
|
861 | 861 | Specify extra metadata to attach to the image. |
|
862 | 862 | alt : unicode |
|
863 | 863 | Alternative text for the image, for use by screen readers. |
|
864 | 864 | |
|
865 | 865 | Examples |
|
866 | 866 | -------- |
|
867 | 867 | embedded image data, works in qtconsole and notebook |
|
868 | 868 | when passed positionally, the first arg can be any of raw image data, |
|
869 | 869 | a URL, or a filename from which to load image data. |
|
870 | 870 | The result is always embedding image data for inline images. |
|
871 | 871 | |
|
872 | 872 | >>> Image('http://www.google.fr/images/srpr/logo3w.png') |
|
873 | 873 | <IPython.core.display.Image object> |
|
874 | 874 | |
|
875 | 875 | >>> Image('/path/to/image.jpg') |
|
876 | 876 | <IPython.core.display.Image object> |
|
877 | 877 | |
|
878 | 878 | >>> Image(b'RAW_PNG_DATA...') |
|
879 | 879 | <IPython.core.display.Image object> |
|
880 | 880 | |
|
881 | 881 | Specifying Image(url=...) does not embed the image data, |
|
882 | 882 | it only generates ``<img>`` tag with a link to the source. |
|
883 | 883 | This will not work in the qtconsole or offline. |
|
884 | 884 | |
|
885 | 885 | >>> Image(url='http://www.google.fr/images/srpr/logo3w.png') |
|
886 | 886 | <IPython.core.display.Image object> |
|
887 | 887 | |
|
888 | 888 | """ |
|
889 | 889 | if isinstance(data, (Path, PurePath)): |
|
890 | 890 | data = str(data) |
|
891 | 891 | |
|
892 | 892 | if filename is not None: |
|
893 | 893 | ext = self._find_ext(filename) |
|
894 | 894 | elif url is not None: |
|
895 | 895 | ext = self._find_ext(url) |
|
896 | 896 | elif data is None: |
|
897 | 897 | raise ValueError("No image data found. Expecting filename, url, or data.") |
|
898 | 898 | elif isinstance(data, str) and ( |
|
899 | 899 | data.startswith('http') or _safe_exists(data) |
|
900 | 900 | ): |
|
901 | 901 | ext = self._find_ext(data) |
|
902 | 902 | else: |
|
903 | 903 | ext = None |
|
904 | 904 | |
|
905 | 905 | if format is None: |
|
906 | 906 | if ext is not None: |
|
907 | 907 | if ext == u'jpg' or ext == u'jpeg': |
|
908 | 908 | format = self._FMT_JPEG |
|
909 | 909 | elif ext == u'png': |
|
910 | 910 | format = self._FMT_PNG |
|
911 | 911 | elif ext == u'gif': |
|
912 | 912 | format = self._FMT_GIF |
|
913 | 913 | else: |
|
914 | 914 | format = ext.lower() |
|
915 | 915 | elif isinstance(data, bytes): |
|
916 | 916 | # infer image type from image data header, |
|
917 | 917 | # only if format has not been specified. |
|
918 | 918 | if data[:2] == _JPEG: |
|
919 | 919 | format = self._FMT_JPEG |
|
920 | 920 | |
|
921 | 921 | # failed to detect format, default png |
|
922 | 922 | if format is None: |
|
923 | 923 | format = self._FMT_PNG |
|
924 | 924 | |
|
925 | 925 | if format.lower() == 'jpg': |
|
926 | 926 | # jpg->jpeg |
|
927 | 927 | format = self._FMT_JPEG |
|
928 | 928 | |
|
929 | 929 | self.format = format.lower() |
|
930 | 930 | self.embed = embed if embed is not None else (url is None) |
|
931 | 931 | |
|
932 | 932 | if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS: |
|
933 | 933 | raise ValueError("Cannot embed the '%s' image format" % (self.format)) |
|
934 | 934 | if self.embed: |
|
935 | 935 | self._mimetype = self._MIMETYPES.get(self.format) |
|
936 | 936 | |
|
937 | 937 | self.width = width |
|
938 | 938 | self.height = height |
|
939 | 939 | self.retina = retina |
|
940 | 940 | self.unconfined = unconfined |
|
941 | 941 | self.alt = alt |
|
942 | 942 | super(Image, self).__init__(data=data, url=url, filename=filename, |
|
943 | 943 | metadata=metadata) |
|
944 | 944 | |
|
945 | 945 | if self.width is None and self.metadata.get('width', {}): |
|
946 | 946 | self.width = metadata['width'] |
|
947 | 947 | |
|
948 | 948 | if self.height is None and self.metadata.get('height', {}): |
|
949 | 949 | self.height = metadata['height'] |
|
950 | 950 | |
|
951 | 951 | if self.alt is None and self.metadata.get("alt", {}): |
|
952 | 952 | self.alt = metadata["alt"] |
|
953 | 953 | |
|
954 | 954 | if retina: |
|
955 | 955 | self._retina_shape() |
|
956 | 956 | |
|
957 | 957 | |
|
958 | 958 | def _retina_shape(self): |
|
959 | 959 | """load pixel-doubled width and height from image data""" |
|
960 | 960 | if not self.embed: |
|
961 | 961 | return |
|
962 | 962 | if self.format == self._FMT_PNG: |
|
963 | 963 | w, h = _pngxy(self.data) |
|
964 | 964 | elif self.format == self._FMT_JPEG: |
|
965 | 965 | w, h = _jpegxy(self.data) |
|
966 | 966 | elif self.format == self._FMT_GIF: |
|
967 | 967 | w, h = _gifxy(self.data) |
|
968 | 968 | else: |
|
969 | 969 | # retina only supports png |
|
970 | 970 | return |
|
971 | 971 | self.width = w // 2 |
|
972 | 972 | self.height = h // 2 |
|
973 | 973 | |
|
974 | 974 | def reload(self): |
|
975 | 975 | """Reload the raw data from file or URL.""" |
|
976 | 976 | if self.embed: |
|
977 | 977 | super(Image,self).reload() |
|
978 | 978 | if self.retina: |
|
979 | 979 | self._retina_shape() |
|
980 | 980 | |
|
981 | 981 | def _repr_html_(self): |
|
982 | 982 | if not self.embed: |
|
983 | 983 | width = height = klass = alt = "" |
|
984 | 984 | if self.width: |
|
985 | 985 | width = ' width="%d"' % self.width |
|
986 | 986 | if self.height: |
|
987 | 987 | height = ' height="%d"' % self.height |
|
988 | 988 | if self.unconfined: |
|
989 | 989 | klass = ' class="unconfined"' |
|
990 | 990 | if self.alt: |
|
991 | 991 | alt = ' alt="%s"' % html.escape(self.alt) |
|
992 | 992 | return '<img src="{url}"{width}{height}{klass}{alt}/>'.format( |
|
993 | 993 | url=self.url, |
|
994 | 994 | width=width, |
|
995 | 995 | height=height, |
|
996 | 996 | klass=klass, |
|
997 | 997 | alt=alt, |
|
998 | 998 | ) |
|
999 | 999 | |
|
1000 | 1000 | def _repr_mimebundle_(self, include=None, exclude=None): |
|
1001 | 1001 | """Return the image as a mimebundle |
|
1002 | 1002 | |
|
1003 | 1003 | Any new mimetype support should be implemented here. |
|
1004 | 1004 | """ |
|
1005 | 1005 | if self.embed: |
|
1006 | 1006 | mimetype = self._mimetype |
|
1007 | 1007 | data, metadata = self._data_and_metadata(always_both=True) |
|
1008 | 1008 | if metadata: |
|
1009 | 1009 | metadata = {mimetype: metadata} |
|
1010 | 1010 | return {mimetype: data}, metadata |
|
1011 | 1011 | else: |
|
1012 | 1012 | return {'text/html': self._repr_html_()} |
|
1013 | 1013 | |
|
1014 | 1014 | def _data_and_metadata(self, always_both=False): |
|
1015 | 1015 | """shortcut for returning metadata with shape information, if defined""" |
|
1016 | 1016 | try: |
|
1017 | 1017 | b64_data = b2a_base64(self.data).decode('ascii') |
|
1018 | 1018 | except TypeError as e: |
|
1019 | 1019 | raise FileNotFoundError( |
|
1020 | 1020 | "No such file or directory: '%s'" % (self.data)) from e |
|
1021 | 1021 | md = {} |
|
1022 | 1022 | if self.metadata: |
|
1023 | 1023 | md.update(self.metadata) |
|
1024 | 1024 | if self.width: |
|
1025 | 1025 | md['width'] = self.width |
|
1026 | 1026 | if self.height: |
|
1027 | 1027 | md['height'] = self.height |
|
1028 | 1028 | if self.unconfined: |
|
1029 | 1029 | md['unconfined'] = self.unconfined |
|
1030 | 1030 | if self.alt: |
|
1031 | 1031 | md["alt"] = self.alt |
|
1032 | 1032 | if md or always_both: |
|
1033 | 1033 | return b64_data, md |
|
1034 | 1034 | else: |
|
1035 | 1035 | return b64_data |
|
1036 | 1036 | |
|
1037 | 1037 | def _repr_png_(self): |
|
1038 | 1038 | if self.embed and self.format == self._FMT_PNG: |
|
1039 | 1039 | return self._data_and_metadata() |
|
1040 | 1040 | |
|
1041 | 1041 | def _repr_jpeg_(self): |
|
1042 | 1042 | if self.embed and self.format == self._FMT_JPEG: |
|
1043 | 1043 | return self._data_and_metadata() |
|
1044 | 1044 | |
|
1045 | 1045 | def _find_ext(self, s): |
|
1046 | 1046 | base, ext = splitext(s) |
|
1047 | 1047 | |
|
1048 | 1048 | if not ext: |
|
1049 | 1049 | return base |
|
1050 | 1050 | |
|
1051 | 1051 | # `splitext` includes leading period, so we skip it |
|
1052 | 1052 | return ext[1:].lower() |
|
1053 | 1053 | |
|
1054 | 1054 | |
|
1055 | 1055 | class Video(DisplayObject): |
|
1056 | 1056 | |
|
1057 | 1057 | def __init__(self, data=None, url=None, filename=None, embed=False, |
|
1058 | 1058 | mimetype=None, width=None, height=None, html_attributes="controls"): |
|
1059 | 1059 | """Create a video object given raw data or an URL. |
|
1060 | 1060 | |
|
1061 | 1061 | When this object is returned by an input cell or passed to the |
|
1062 | 1062 | display function, it will result in the video being displayed |
|
1063 | 1063 | in the frontend. |
|
1064 | 1064 | |
|
1065 | 1065 | Parameters |
|
1066 | 1066 | ---------- |
|
1067 | 1067 | data : unicode, str or bytes |
|
1068 | 1068 | The raw video data or a URL or filename to load the data from. |
|
1069 | 1069 | Raw data will require passing ``embed=True``. |
|
1070 | 1070 | url : unicode |
|
1071 | 1071 | A URL for the video. If you specify ``url=``, |
|
1072 | 1072 | the image data will not be embedded. |
|
1073 | 1073 | filename : unicode |
|
1074 | 1074 | Path to a local file containing the video. |
|
1075 | 1075 | Will be interpreted as a local URL unless ``embed=True``. |
|
1076 | 1076 | embed : bool |
|
1077 | 1077 | Should the video be embedded using a data URI (True) or be |
|
1078 | 1078 | loaded using a <video> tag (False). |
|
1079 | 1079 | |
|
1080 | 1080 | Since videos are large, embedding them should be avoided, if possible. |
|
1081 | 1081 | You must confirm embedding as your intention by passing ``embed=True``. |
|
1082 | 1082 | |
|
1083 | 1083 | Local files can be displayed with URLs without embedding the content, via:: |
|
1084 | 1084 | |
|
1085 | 1085 | Video('./video.mp4') |
|
1086 | 1086 | mimetype : unicode |
|
1087 | 1087 | Specify the mimetype for embedded videos. |
|
1088 | 1088 | Default will be guessed from file extension, if available. |
|
1089 | 1089 | width : int |
|
1090 | 1090 | Width in pixels to which to constrain the video in HTML. |
|
1091 | 1091 | If not supplied, defaults to the width of the video. |
|
1092 | 1092 | height : int |
|
1093 | 1093 | Height in pixels to which to constrain the video in html. |
|
1094 | 1094 | If not supplied, defaults to the height of the video. |
|
1095 | 1095 | html_attributes : str |
|
1096 | 1096 | Attributes for the HTML ``<video>`` block. |
|
1097 | 1097 | Default: ``"controls"`` to get video controls. |
|
1098 | 1098 | Other examples: ``"controls muted"`` for muted video with controls, |
|
1099 | 1099 | ``"loop autoplay"`` for looping autoplaying video without controls. |
|
1100 | 1100 | |
|
1101 | 1101 | Examples |
|
1102 | 1102 | -------- |
|
1103 | 1103 | :: |
|
1104 | 1104 | |
|
1105 | 1105 | Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4') |
|
1106 | 1106 | Video('path/to/video.mp4') |
|
1107 | 1107 | Video('path/to/video.mp4', embed=True) |
|
1108 | 1108 | Video('path/to/video.mp4', embed=True, html_attributes="controls muted autoplay") |
|
1109 | 1109 | Video(b'raw-videodata', embed=True) |
|
1110 | 1110 | """ |
|
1111 | 1111 | if isinstance(data, (Path, PurePath)): |
|
1112 | 1112 | data = str(data) |
|
1113 | 1113 | |
|
1114 | 1114 | if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')): |
|
1115 | 1115 | url = data |
|
1116 | 1116 | data = None |
|
1117 | 1117 | elif data is not None and os.path.exists(data): |
|
1118 | 1118 | filename = data |
|
1119 | 1119 | data = None |
|
1120 | 1120 | |
|
1121 | 1121 | if data and not embed: |
|
1122 | 1122 | msg = ''.join([ |
|
1123 | 1123 | "To embed videos, you must pass embed=True ", |
|
1124 | 1124 | "(this may make your notebook files huge)\n", |
|
1125 | 1125 | "Consider passing Video(url='...')", |
|
1126 | 1126 | ]) |
|
1127 | 1127 | raise ValueError(msg) |
|
1128 | 1128 | |
|
1129 | 1129 | self.mimetype = mimetype |
|
1130 | 1130 | self.embed = embed |
|
1131 | 1131 | self.width = width |
|
1132 | 1132 | self.height = height |
|
1133 | 1133 | self.html_attributes = html_attributes |
|
1134 | 1134 | super(Video, self).__init__(data=data, url=url, filename=filename) |
|
1135 | 1135 | |
|
1136 | 1136 | def _repr_html_(self): |
|
1137 | 1137 | width = height = '' |
|
1138 | 1138 | if self.width: |
|
1139 | 1139 | width = ' width="%d"' % self.width |
|
1140 | 1140 | if self.height: |
|
1141 | 1141 | height = ' height="%d"' % self.height |
|
1142 | 1142 | |
|
1143 | 1143 | # External URLs and potentially local files are not embedded into the |
|
1144 | 1144 | # notebook output. |
|
1145 | 1145 | if not self.embed: |
|
1146 | 1146 | url = self.url if self.url is not None else self.filename |
|
1147 | 1147 | output = """<video src="{0}" {1} {2} {3}> |
|
1148 | 1148 | Your browser does not support the <code>video</code> element. |
|
1149 | 1149 | </video>""".format(url, self.html_attributes, width, height) |
|
1150 | 1150 | return output |
|
1151 | 1151 | |
|
1152 | 1152 | # Embedded videos are base64-encoded. |
|
1153 | 1153 | mimetype = self.mimetype |
|
1154 | 1154 | if self.filename is not None: |
|
1155 | 1155 | if not mimetype: |
|
1156 | 1156 | mimetype, _ = mimetypes.guess_type(self.filename) |
|
1157 | 1157 | |
|
1158 | 1158 | with open(self.filename, 'rb') as f: |
|
1159 | 1159 | video = f.read() |
|
1160 | 1160 | else: |
|
1161 | 1161 | video = self.data |
|
1162 | 1162 | if isinstance(video, str): |
|
1163 | 1163 | # unicode input is already b64-encoded |
|
1164 | 1164 | b64_video = video |
|
1165 | 1165 | else: |
|
1166 | 1166 | b64_video = b2a_base64(video).decode('ascii').rstrip() |
|
1167 | 1167 | |
|
1168 | 1168 | output = """<video {0} {1} {2}> |
|
1169 | 1169 | <source src="data:{3};base64,{4}" type="{3}"> |
|
1170 | 1170 | Your browser does not support the video tag. |
|
1171 | 1171 | </video>""".format(self.html_attributes, width, height, mimetype, b64_video) |
|
1172 | 1172 | return output |
|
1173 | 1173 | |
|
1174 | 1174 | def reload(self): |
|
1175 | 1175 | # TODO |
|
1176 | 1176 | pass |
|
1177 | 1177 | |
|
1178 | 1178 | |
|
1179 | 1179 | @skip_doctest |
|
1180 | 1180 | def set_matplotlib_formats(*formats, **kwargs): |
|
1181 | """Select figure formats for the inline backend. Optionally pass quality for JPEG. | |
|
1181 | """ | |
|
1182 | .. deprecated:: 7.23 | |
|
1183 | ||
|
1184 | use `matplotlib_inline.backend_inline.set_matplotlib_formats()` | |
|
1185 | ||
|
1186 | Select figure formats for the inline backend. Optionally pass quality for JPEG. | |
|
1182 | 1187 | |
|
1183 | 1188 | For example, this enables PNG and JPEG output with a JPEG quality of 90%:: |
|
1184 | 1189 | |
|
1185 | 1190 | In [1]: set_matplotlib_formats('png', 'jpeg', quality=90) |
|
1186 | 1191 | |
|
1187 | 1192 | To set this in your config files use the following:: |
|
1188 | 1193 | |
|
1189 | 1194 | c.InlineBackend.figure_formats = {'png', 'jpeg'} |
|
1190 | 1195 | c.InlineBackend.print_figure_kwargs.update({'quality' : 90}) |
|
1191 | 1196 | |
|
1192 | 1197 | Parameters |
|
1193 | 1198 | ---------- |
|
1194 | 1199 | *formats : strs |
|
1195 | 1200 | One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. |
|
1196 | 1201 | **kwargs |
|
1197 | 1202 | Keyword args will be relayed to ``figure.canvas.print_figure``. |
|
1198 | 1203 | """ |
|
1199 | from IPython.core.interactiveshell import InteractiveShell | |
|
1200 | from IPython.core.pylabtools import select_figure_formats | |
|
1201 | # build kwargs, starting with InlineBackend config | |
|
1202 | kw = {} | |
|
1203 | from ipykernel.pylab.config import InlineBackend | |
|
1204 | cfg = InlineBackend.instance() | |
|
1205 | kw.update(cfg.print_figure_kwargs) | |
|
1206 | kw.update(**kwargs) | |
|
1207 | shell = InteractiveShell.instance() | |
|
1208 | select_figure_formats(shell, formats, **kw) | |
|
1204 | warnings.warn( | |
|
1205 | "`set_matplotlib_formats` is deprecated since IPython 7.23, directly " | |
|
1206 | "use `matplotlib_inline.backend_inline.set_matplotlib_formats()`", | |
|
1207 | DeprecationWarning, | |
|
1208 | stacklevel=2, | |
|
1209 | ) | |
|
1210 | ||
|
1211 | from matplotlib_inline.backend_inline import ( | |
|
1212 | set_matplotlib_formats as set_matplotlib_formats_orig, | |
|
1213 | ) | |
|
1214 | ||
|
1215 | set_matplotlib_formats_orig(*formats, **kwargs) | |
|
1209 | 1216 | |
|
1210 | 1217 | @skip_doctest |
|
1211 | 1218 | def set_matplotlib_close(close=True): |
|
1212 | """Set whether the inline backend closes all figures automatically or not. | |
|
1219 | """ | |
|
1220 | .. deprecated:: 7.23 | |
|
1221 | ||
|
1222 | use `matplotlib_inline.backend_inline.set_matplotlib_close()` | |
|
1223 | ||
|
1224 | ||
|
1225 | Set whether the inline backend closes all figures automatically or not. | |
|
1213 | 1226 | |
|
1214 | 1227 | By default, the inline backend used in the IPython Notebook will close all |
|
1215 | 1228 | matplotlib figures automatically after each cell is run. This means that |
|
1216 | 1229 | plots in different cells won't interfere. Sometimes, you may want to make |
|
1217 | 1230 | a plot in one cell and then refine it in later cells. This can be accomplished |
|
1218 | 1231 | by:: |
|
1219 | 1232 | |
|
1220 | 1233 | In [1]: set_matplotlib_close(False) |
|
1221 | 1234 | |
|
1222 | 1235 | To set this in your config files use the following:: |
|
1223 | 1236 | |
|
1224 | 1237 | c.InlineBackend.close_figures = False |
|
1225 | 1238 | |
|
1226 | 1239 | Parameters |
|
1227 | 1240 | ---------- |
|
1228 | 1241 | close : bool |
|
1229 | 1242 | Should all matplotlib figures be automatically closed after each cell is |
|
1230 | 1243 | run? |
|
1231 | 1244 | """ |
|
1232 | from ipykernel.pylab.config import InlineBackend | |
|
1233 | cfg = InlineBackend.instance() | |
|
1234 | cfg.close_figures = close | |
|
1245 | warnings.warn( | |
|
1246 | "`set_matplotlib_close` is deprecated since IPython 7.23, directly " | |
|
1247 | "use `matplotlib_inline.backend_inline.set_matplotlib_close()`", | |
|
1248 | DeprecationWarning, | |
|
1249 | stacklevel=2, | |
|
1250 | ) | |
|
1251 | ||
|
1252 | from matplotlib_inline.backend_inline import ( | |
|
1253 | set_matplotlib_close as set_matplotlib_close_orig, | |
|
1254 | ) | |
|
1255 | ||
|
1256 | set_matplotlib_close_orig(close) |
@@ -1,367 +1,374 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Top-level display functions for displaying object in different formats.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | from binascii import b2a_hex |
|
9 | 9 | import os |
|
10 | 10 | import sys |
|
11 | 11 | |
|
12 | 12 | __all__ = ['display', 'clear_output', 'publish_display_data', 'update_display', 'DisplayHandle'] |
|
13 | 13 | |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | # utility functions |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | def _merge(d1, d2): |
|
20 | 20 | """Like update, but merges sub-dicts instead of clobbering at the top level. |
|
21 | 21 | |
|
22 | 22 | Updates d1 in-place |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | if not isinstance(d2, dict) or not isinstance(d1, dict): |
|
26 | 26 | return d2 |
|
27 | 27 | for key, value in d2.items(): |
|
28 | 28 | d1[key] = _merge(d1.get(key), value) |
|
29 | 29 | return d1 |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | #----------------------------------------------------------------------------- |
|
33 | 33 | # Main functions |
|
34 | 34 | #----------------------------------------------------------------------------- |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | # use * to indicate transient is keyword-only |
|
38 | 38 | def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): |
|
39 | 39 | """Publish data and metadata to all frontends. |
|
40 | 40 | |
|
41 | 41 | See the ``display_data`` message in the messaging documentation for |
|
42 | 42 | more details about this message type. |
|
43 | 43 | |
|
44 | 44 | Keys of data and metadata can be any mime-type. |
|
45 | 45 | |
|
46 | 46 | Parameters |
|
47 | 47 | ---------- |
|
48 | 48 | data : dict |
|
49 | 49 | A dictionary having keys that are valid MIME types (like |
|
50 | 50 | 'text/plain' or 'image/svg+xml') and values that are the data for |
|
51 | 51 | that MIME type. The data itself must be a JSON'able data |
|
52 | 52 | structure. Minimally all data should have the 'text/plain' data, |
|
53 | 53 | which can be displayed by all frontends. If more than the plain |
|
54 | 54 | text is given, it is up to the frontend to decide which |
|
55 | 55 | representation to use. |
|
56 | 56 | metadata : dict |
|
57 | 57 | A dictionary for metadata related to the data. This can contain |
|
58 | 58 | arbitrary key, value pairs that frontends can use to interpret |
|
59 | 59 | the data. mime-type keys matching those in data can be used |
|
60 | 60 | to specify metadata about particular representations. |
|
61 | 61 | source : str, deprecated |
|
62 | 62 | Unused. |
|
63 | 63 | transient : dict, keyword-only |
|
64 | 64 | A dictionary of transient data, such as display_id. |
|
65 | 65 |
|
|
66 | 66 | from IPython.core.interactiveshell import InteractiveShell |
|
67 | 67 | |
|
68 | 68 | display_pub = InteractiveShell.instance().display_pub |
|
69 | 69 | |
|
70 | 70 | # only pass transient if supplied, |
|
71 | 71 | # to avoid errors with older ipykernel. |
|
72 | 72 | # TODO: We could check for ipykernel version and provide a detailed upgrade message. |
|
73 | 73 | if transient: |
|
74 | 74 | kwargs['transient'] = transient |
|
75 | 75 | |
|
76 | 76 | display_pub.publish( |
|
77 | 77 | data=data, |
|
78 | 78 | metadata=metadata, |
|
79 | 79 | **kwargs |
|
80 | 80 | ) |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | def _new_id(): |
|
84 | 84 | """Generate a new random text id with urandom""" |
|
85 | 85 | return b2a_hex(os.urandom(16)).decode('ascii') |
|
86 | 86 | |
|
87 | 87 | |
|
88 | def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): | |
|
88 | def display( | |
|
89 | *objs, | |
|
90 | include=None, | |
|
91 | exclude=None, | |
|
92 | metadata=None, | |
|
93 | transient=None, | |
|
94 | display_id=None, | |
|
95 | raw=False, | |
|
96 | clear=False, | |
|
97 | **kwargs | |
|
98 | ): | |
|
89 | 99 | """Display a Python object in all frontends. |
|
90 | 100 | |
|
91 | 101 | By default all representations will be computed and sent to the frontends. |
|
92 | 102 | Frontends can decide which representation is used and how. |
|
93 | 103 | |
|
94 | 104 | In terminal IPython this will be similar to using :func:`print`, for use in richer |
|
95 | 105 | frontends see Jupyter notebook examples with rich display logic. |
|
96 | 106 | |
|
97 | 107 | Parameters |
|
98 | 108 | ---------- |
|
99 | 109 | *objs : object |
|
100 | 110 | The Python objects to display. |
|
101 | 111 | raw : bool, optional |
|
102 | 112 | Are the objects to be displayed already mimetype-keyed dicts of raw display data, |
|
103 | 113 | or Python objects that need to be formatted before display? [default: False] |
|
104 | 114 | include : list, tuple or set, optional |
|
105 | 115 | A list of format type strings (MIME types) to include in the |
|
106 | 116 | format data dict. If this is set *only* the format types included |
|
107 | 117 | in this list will be computed. |
|
108 | 118 | exclude : list, tuple or set, optional |
|
109 | 119 | A list of format type strings (MIME types) to exclude in the format |
|
110 | 120 | data dict. If this is set all format types will be computed, |
|
111 | 121 | except for those included in this argument. |
|
112 | 122 | metadata : dict, optional |
|
113 | 123 | A dictionary of metadata to associate with the output. |
|
114 | 124 | mime-type keys in this dictionary will be associated with the individual |
|
115 | 125 | representation formats, if they exist. |
|
116 | 126 | transient : dict, optional |
|
117 | 127 | A dictionary of transient data to associate with the output. |
|
118 | 128 | Data in this dict should not be persisted to files (e.g. notebooks). |
|
119 | 129 | display_id : str, bool optional |
|
120 | 130 | Set an id for the display. |
|
121 | 131 | This id can be used for updating this display area later via update_display. |
|
122 | 132 | If given as `True`, generate a new `display_id` |
|
123 | kwargs: additional keyword-args, optional | |
|
133 | clear : bool, optional | |
|
134 | Should the output area be cleared before displaying anything? If True, | |
|
135 | this will wait for additional output before clearing. [default: False] | |
|
136 | **kwargs : additional keyword-args, optional | |
|
124 | 137 | Additional keyword-arguments are passed through to the display publisher. |
|
125 | 138 | |
|
126 | 139 | Returns |
|
127 | 140 | ------- |
|
128 | ||
|
129 | 141 | handle: DisplayHandle |
|
130 | 142 | Returns a handle on updatable displays for use with :func:`update_display`, |
|
131 | 143 | if `display_id` is given. Returns :any:`None` if no `display_id` is given |
|
132 | 144 | (default). |
|
133 | 145 | |
|
134 | 146 | Examples |
|
135 | 147 | -------- |
|
136 | ||
|
137 | 148 | >>> class Json(object): |
|
138 | 149 | ... def __init__(self, json): |
|
139 | 150 | ... self.json = json |
|
140 | 151 | ... def _repr_pretty_(self, pp, cycle): |
|
141 | 152 | ... import json |
|
142 | 153 | ... pp.text(json.dumps(self.json, indent=2)) |
|
143 | 154 | ... def __repr__(self): |
|
144 | 155 | ... return str(self.json) |
|
145 | 156 | ... |
|
146 | 157 | |
|
147 | 158 | >>> d = Json({1:2, 3: {4:5}}) |
|
148 | 159 | |
|
149 | 160 | >>> print(d) |
|
150 | 161 | {1: 2, 3: {4: 5}} |
|
151 | 162 | |
|
152 | 163 | >>> display(d) |
|
153 | 164 | { |
|
154 | 165 | "1": 2, |
|
155 | 166 | "3": { |
|
156 | 167 | "4": 5 |
|
157 | 168 | } |
|
158 | 169 | } |
|
159 | 170 | |
|
160 | 171 | >>> def int_formatter(integer, pp, cycle): |
|
161 | 172 | ... pp.text('I'*integer) |
|
162 | 173 | |
|
163 | 174 | >>> plain = get_ipython().display_formatter.formatters['text/plain'] |
|
164 | 175 | >>> plain.for_type(int, int_formatter) |
|
165 | 176 | <function _repr_pprint at 0x...> |
|
166 | 177 | >>> display(7-5) |
|
167 | 178 | II |
|
168 | 179 | |
|
169 | 180 | >>> del plain.type_printers[int] |
|
170 | 181 | >>> display(7-5) |
|
171 | 182 | 2 |
|
172 | 183 | |
|
173 | 184 | See Also |
|
174 | 185 | -------- |
|
175 | ||
|
176 | 186 | :func:`update_display` |
|
177 | 187 | |
|
178 | 188 | Notes |
|
179 | 189 | ----- |
|
180 | ||
|
181 | 190 | In Python, objects can declare their textual representation using the |
|
182 | 191 | `__repr__` method. IPython expands on this idea and allows objects to declare |
|
183 | 192 | other, rich representations including: |
|
184 | 193 | |
|
185 | 194 | - HTML |
|
186 | 195 | - JSON |
|
187 | 196 | - PNG |
|
188 | 197 | - JPEG |
|
189 | 198 | - SVG |
|
190 | 199 | - LaTeX |
|
191 | 200 | |
|
192 | 201 | A single object can declare some or all of these representations; all are |
|
193 | 202 | handled by IPython's display system. |
|
194 | 203 | |
|
195 | 204 | The main idea of the first approach is that you have to implement special |
|
196 | 205 | display methods when you define your class, one for each representation you |
|
197 | 206 | want to use. Here is a list of the names of the special methods and the |
|
198 | 207 | values they must return: |
|
199 | 208 | |
|
200 | 209 | - `_repr_html_`: return raw HTML as a string, or a tuple (see below). |
|
201 | 210 | - `_repr_json_`: return a JSONable dict, or a tuple (see below). |
|
202 | 211 | - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). |
|
203 | 212 | - `_repr_png_`: return raw PNG data, or a tuple (see below). |
|
204 | 213 | - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). |
|
205 | 214 | - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", |
|
206 | 215 | or a tuple (see below). |
|
207 | 216 | - `_repr_mimebundle_`: return a full mimebundle containing the mapping |
|
208 | 217 | from all mimetypes to data. |
|
209 | 218 | Use this for any mime-type not listed above. |
|
210 | 219 | |
|
211 | 220 | The above functions may also return the object's metadata alonside the |
|
212 | 221 | data. If the metadata is available, the functions will return a tuple |
|
213 | 222 | containing the data and metadata, in that order. If there is no metadata |
|
214 | 223 | available, then the functions will return the data only. |
|
215 | 224 | |
|
216 | 225 | When you are directly writing your own classes, you can adapt them for |
|
217 | 226 | display in IPython by following the above approach. But in practice, you |
|
218 | 227 | often need to work with existing classes that you can't easily modify. |
|
219 | 228 | |
|
220 | 229 | You can refer to the documentation on integrating with the display system in |
|
221 | 230 | order to register custom formatters for already existing types |
|
222 | 231 | (:ref:`integrating_rich_display`). |
|
223 | 232 | |
|
224 | 233 | .. versionadded:: 5.4 display available without import |
|
225 | 234 | .. versionadded:: 6.1 display available without import |
|
226 | 235 | |
|
227 | 236 | Since IPython 5.4 and 6.1 :func:`display` is automatically made available to |
|
228 | 237 | the user without import. If you are using display in a document that might |
|
229 | 238 | be used in a pure python context or with older version of IPython, use the |
|
230 | 239 | following import at the top of your file:: |
|
231 | 240 | |
|
232 | 241 | from IPython.display import display |
|
233 | 242 | |
|
234 | 243 | """ |
|
235 | 244 | from IPython.core.interactiveshell import InteractiveShell |
|
236 | 245 | |
|
237 | 246 | if not InteractiveShell.initialized(): |
|
238 | 247 | # Directly print objects. |
|
239 | 248 | print(*objs) |
|
240 | 249 | return |
|
241 | 250 | |
|
242 | raw = kwargs.pop('raw', False) | |
|
243 | 251 | if transient is None: |
|
244 | 252 | transient = {} |
|
245 | 253 | if metadata is None: |
|
246 | 254 | metadata={} |
|
247 | 255 | if display_id: |
|
248 | 256 | if display_id is True: |
|
249 | 257 | display_id = _new_id() |
|
250 | 258 | transient['display_id'] = display_id |
|
251 | 259 | if kwargs.get('update') and 'display_id' not in transient: |
|
252 | 260 | raise TypeError('display_id required for update_display') |
|
253 | 261 | if transient: |
|
254 | 262 | kwargs['transient'] = transient |
|
255 | 263 | |
|
256 | 264 | if not objs and display_id: |
|
257 | 265 | # if given no objects, but still a request for a display_id, |
|
258 | 266 | # we assume the user wants to insert an empty output that |
|
259 | 267 | # can be updated later |
|
260 | 268 | objs = [{}] |
|
261 | 269 | raw = True |
|
262 | 270 | |
|
263 | 271 | if not raw: |
|
264 | 272 | format = InteractiveShell.instance().display_formatter.format |
|
265 | 273 | |
|
274 | if clear: | |
|
275 | clear_output(wait=True) | |
|
276 | ||
|
266 | 277 | for obj in objs: |
|
267 | 278 | if raw: |
|
268 | 279 | publish_display_data(data=obj, metadata=metadata, **kwargs) |
|
269 | 280 | else: |
|
270 | 281 | format_dict, md_dict = format(obj, include=include, exclude=exclude) |
|
271 | 282 | if not format_dict: |
|
272 | 283 | # nothing to display (e.g. _ipython_display_ took over) |
|
273 | 284 | continue |
|
274 | 285 | if metadata: |
|
275 | 286 | # kwarg-specified metadata gets precedence |
|
276 | 287 | _merge(md_dict, metadata) |
|
277 | 288 | publish_display_data(data=format_dict, metadata=md_dict, **kwargs) |
|
278 | 289 | if display_id: |
|
279 | 290 | return DisplayHandle(display_id) |
|
280 | 291 | |
|
281 | 292 | |
|
282 | 293 | # use * for keyword-only display_id arg |
|
283 | 294 | def update_display(obj, *, display_id, **kwargs): |
|
284 | 295 | """Update an existing display by id |
|
285 | 296 | |
|
286 | 297 | Parameters |
|
287 | 298 | ---------- |
|
288 | ||
|
289 | obj: | |
|
299 | obj | |
|
290 | 300 | The object with which to update the display |
|
291 | 301 | display_id: keyword-only |
|
292 | 302 | The id of the display to update |
|
293 | 303 | |
|
294 | 304 | See Also |
|
295 | 305 | -------- |
|
296 | ||
|
297 | 306 | :func:`display` |
|
298 | 307 | """ |
|
299 | 308 | kwargs['update'] = True |
|
300 | 309 | display(obj, display_id=display_id, **kwargs) |
|
301 | 310 | |
|
302 | 311 | |
|
303 | 312 | class DisplayHandle(object): |
|
304 | 313 | """A handle on an updatable display |
|
305 | 314 | |
|
306 | 315 | Call `.update(obj)` to display a new object. |
|
307 | 316 | |
|
308 | 317 | Call `.display(obj`) to add a new instance of this display, |
|
309 | 318 | and update existing instances. |
|
310 | 319 | |
|
311 | 320 | See Also |
|
312 | 321 | -------- |
|
313 | 322 | |
|
314 | 323 | :func:`display`, :func:`update_display` |
|
315 | 324 | |
|
316 | 325 | """ |
|
317 | 326 | |
|
318 | 327 | def __init__(self, display_id=None): |
|
319 | 328 | if display_id is None: |
|
320 | 329 | display_id = _new_id() |
|
321 | 330 | self.display_id = display_id |
|
322 | 331 | |
|
323 | 332 | def __repr__(self): |
|
324 | 333 | return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) |
|
325 | 334 | |
|
326 | 335 | def display(self, obj, **kwargs): |
|
327 | 336 | """Make a new display with my id, updating existing instances. |
|
328 | 337 | |
|
329 | 338 | Parameters |
|
330 | 339 | ---------- |
|
331 | ||
|
332 | obj: | |
|
340 | obj | |
|
333 | 341 | object to display |
|
334 |
**kwargs |
|
|
342 | **kwargs | |
|
335 | 343 | additional keyword arguments passed to display |
|
336 | 344 | """ |
|
337 | 345 | display(obj, display_id=self.display_id, **kwargs) |
|
338 | 346 | |
|
339 | 347 | def update(self, obj, **kwargs): |
|
340 | 348 | """Update existing displays with my id |
|
341 | 349 | |
|
342 | 350 | Parameters |
|
343 | 351 | ---------- |
|
344 | ||
|
345 | obj: | |
|
352 | obj | |
|
346 | 353 | object to display |
|
347 |
**kwargs |
|
|
354 | **kwargs | |
|
348 | 355 | additional keyword arguments passed to update_display |
|
349 | 356 | """ |
|
350 | 357 | update_display(obj, display_id=self.display_id, **kwargs) |
|
351 | 358 | |
|
352 | 359 | |
|
353 | 360 | def clear_output(wait=False): |
|
354 | 361 | """Clear the output of the current cell receiving output. |
|
355 | 362 | |
|
356 | 363 | Parameters |
|
357 | 364 | ---------- |
|
358 | 365 | wait : bool [default: false] |
|
359 | 366 | Wait to clear the output until new output is available to replace it.""" |
|
360 | 367 | from IPython.core.interactiveshell import InteractiveShell |
|
361 | 368 | if InteractiveShell.initialized(): |
|
362 | 369 | InteractiveShell.instance().display_pub.clear_output(wait) |
|
363 | 370 | else: |
|
364 | 371 | print('\033[2K\r', end='') |
|
365 | 372 | sys.stdout.flush() |
|
366 | 373 | print('\033[2K\r', end='') |
|
367 | 374 | sys.stderr.flush() |
|
1 | NO CONTENT: modified file |
|
1 | NO CONTENT: modified file |
@@ -1,161 +1,161 b'' | |||
|
1 | 1 | """Infrastructure for registering and firing callbacks on application events. |
|
2 | 2 | |
|
3 | 3 | Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to |
|
4 | 4 | be called at specific times, or a collection of alternative methods to try, |
|
5 | 5 | callbacks are designed to be used by extension authors. A number of callbacks |
|
6 | 6 | can be registered for the same event without needing to be aware of one another. |
|
7 | 7 | |
|
8 | 8 | The functions defined in this module are no-ops indicating the names of available |
|
9 | 9 | events and the arguments which will be passed to them. |
|
10 | 10 | |
|
11 | 11 | .. note:: |
|
12 | 12 | |
|
13 | 13 | This API is experimental in IPython 2.0, and may be revised in future versions. |
|
14 | 14 | """ |
|
15 | 15 | |
|
16 | 16 | from backcall import callback_prototype |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | class EventManager(object): |
|
20 | 20 | """Manage a collection of events and a sequence of callbacks for each. |
|
21 | 21 | |
|
22 | 22 | This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell` |
|
23 | 23 | instances as an ``events`` attribute. |
|
24 | 24 | |
|
25 | 25 | .. note:: |
|
26 | 26 | |
|
27 | 27 | This API is experimental in IPython 2.0, and may be revised in future versions. |
|
28 | 28 | """ |
|
29 | 29 | def __init__(self, shell, available_events): |
|
30 | 30 | """Initialise the :class:`CallbackManager`. |
|
31 | 31 | |
|
32 | 32 | Parameters |
|
33 | 33 | ---------- |
|
34 | 34 | shell |
|
35 | 35 | The :class:`~IPython.core.interactiveshell.InteractiveShell` instance |
|
36 |
available_ |
|
|
36 | available_events | |
|
37 | 37 | An iterable of names for callback events. |
|
38 | 38 | """ |
|
39 | 39 | self.shell = shell |
|
40 | 40 | self.callbacks = {n:[] for n in available_events} |
|
41 | 41 | |
|
42 | 42 | def register(self, event, function): |
|
43 | 43 | """Register a new event callback. |
|
44 | 44 | |
|
45 | 45 | Parameters |
|
46 | 46 | ---------- |
|
47 | 47 | event : str |
|
48 | 48 | The event for which to register this callback. |
|
49 | 49 | function : callable |
|
50 | 50 | A function to be called on the given event. It should take the same |
|
51 | 51 | parameters as the appropriate callback prototype. |
|
52 | 52 | |
|
53 | 53 | Raises |
|
54 | 54 | ------ |
|
55 | 55 | TypeError |
|
56 | 56 | If ``function`` is not callable. |
|
57 | 57 | KeyError |
|
58 | 58 | If ``event`` is not one of the known events. |
|
59 | 59 | """ |
|
60 | 60 | if not callable(function): |
|
61 | 61 | raise TypeError('Need a callable, got %r' % function) |
|
62 | 62 | callback_proto = available_events.get(event) |
|
63 | 63 | if function not in self.callbacks[event]: |
|
64 | 64 | self.callbacks[event].append(callback_proto.adapt(function)) |
|
65 | 65 | |
|
66 | 66 | def unregister(self, event, function): |
|
67 | 67 | """Remove a callback from the given event.""" |
|
68 | 68 | if function in self.callbacks[event]: |
|
69 | 69 | return self.callbacks[event].remove(function) |
|
70 | 70 | |
|
71 | 71 | # Remove callback in case ``function`` was adapted by `backcall`. |
|
72 | 72 | for callback in self.callbacks[event]: |
|
73 | 73 | try: |
|
74 | 74 | if callback.__wrapped__ is function: |
|
75 | 75 | return self.callbacks[event].remove(callback) |
|
76 | 76 | except AttributeError: |
|
77 | 77 | pass |
|
78 | 78 | |
|
79 | 79 | raise ValueError('Function {!r} is not registered as a {} callback'.format(function, event)) |
|
80 | 80 | |
|
81 | 81 | def trigger(self, event, *args, **kwargs): |
|
82 | 82 | """Call callbacks for ``event``. |
|
83 | 83 | |
|
84 | 84 | Any additional arguments are passed to all callbacks registered for this |
|
85 | 85 | event. Exceptions raised by callbacks are caught, and a message printed. |
|
86 | 86 | """ |
|
87 | 87 | for func in self.callbacks[event][:]: |
|
88 | 88 | try: |
|
89 | 89 | func(*args, **kwargs) |
|
90 | 90 | except (Exception, KeyboardInterrupt): |
|
91 | 91 | print("Error in callback {} (for {}):".format(func, event)) |
|
92 | 92 | self.shell.showtraceback() |
|
93 | 93 | |
|
94 | 94 | # event_name -> prototype mapping |
|
95 | 95 | available_events = {} |
|
96 | 96 | |
|
97 | 97 | def _define_event(callback_function): |
|
98 | 98 | callback_proto = callback_prototype(callback_function) |
|
99 | 99 | available_events[callback_function.__name__] = callback_proto |
|
100 | 100 | return callback_proto |
|
101 | 101 | |
|
102 | 102 | # ------------------------------------------------------------------------------ |
|
103 | 103 | # Callback prototypes |
|
104 | 104 | # |
|
105 | 105 | # No-op functions which describe the names of available events and the |
|
106 | 106 | # signatures of callbacks for those events. |
|
107 | 107 | # ------------------------------------------------------------------------------ |
|
108 | 108 | |
|
109 | 109 | @_define_event |
|
110 | 110 | def pre_execute(): |
|
111 | 111 | """Fires before code is executed in response to user/frontend action. |
|
112 | 112 | |
|
113 | 113 | This includes comm and widget messages and silent execution, as well as user |
|
114 | 114 | code cells. |
|
115 | 115 | """ |
|
116 | 116 | pass |
|
117 | 117 | |
|
118 | 118 | @_define_event |
|
119 | 119 | def pre_run_cell(info): |
|
120 | 120 | """Fires before user-entered code runs. |
|
121 | 121 | |
|
122 | 122 | Parameters |
|
123 | 123 | ---------- |
|
124 | 124 | info : :class:`~IPython.core.interactiveshell.ExecutionInfo` |
|
125 | 125 | An object containing information used for the code execution. |
|
126 | 126 | """ |
|
127 | 127 | pass |
|
128 | 128 | |
|
129 | 129 | @_define_event |
|
130 | 130 | def post_execute(): |
|
131 | 131 | """Fires after code is executed in response to user/frontend action. |
|
132 | 132 | |
|
133 | 133 | This includes comm and widget messages and silent execution, as well as user |
|
134 | 134 | code cells. |
|
135 | 135 | """ |
|
136 | 136 | pass |
|
137 | 137 | |
|
138 | 138 | @_define_event |
|
139 | 139 | def post_run_cell(result): |
|
140 | 140 | """Fires after user-entered code runs. |
|
141 | 141 | |
|
142 | 142 | Parameters |
|
143 | 143 | ---------- |
|
144 | 144 | result : :class:`~IPython.core.interactiveshell.ExecutionResult` |
|
145 | 145 | The object which will be returned as the execution result. |
|
146 | 146 | """ |
|
147 | 147 | pass |
|
148 | 148 | |
|
149 | 149 | @_define_event |
|
150 | 150 | def shell_initialized(ip): |
|
151 | 151 | """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`. |
|
152 | 152 | |
|
153 | 153 | This is before extensions and startup scripts are loaded, so it can only be |
|
154 | 154 | set by subclassing. |
|
155 | 155 | |
|
156 | 156 | Parameters |
|
157 | 157 | ---------- |
|
158 | 158 | ip : :class:`~IPython.core.interactiveshell.InteractiveShell` |
|
159 | 159 | The newly initialised shell. |
|
160 | 160 | """ |
|
161 | 161 | pass |
|
1 | NO CONTENT: modified file |
@@ -1,1024 +1,1026 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Display formatters. |
|
3 | 3 | |
|
4 | 4 | Inheritance diagram: |
|
5 | 5 | |
|
6 | 6 | .. inheritance-diagram:: IPython.core.formatters |
|
7 | 7 | :parts: 3 |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | # Copyright (c) IPython Development Team. |
|
11 | 11 | # Distributed under the terms of the Modified BSD License. |
|
12 | 12 | |
|
13 | 13 | import abc |
|
14 | 14 | import json |
|
15 | 15 | import sys |
|
16 | 16 | import traceback |
|
17 | 17 | import warnings |
|
18 | 18 | from io import StringIO |
|
19 | 19 | |
|
20 | 20 | from decorator import decorator |
|
21 | 21 | |
|
22 | 22 | from traitlets.config.configurable import Configurable |
|
23 | 23 | from .getipython import get_ipython |
|
24 | 24 | from ..utils.sentinel import Sentinel |
|
25 | 25 | from ..utils.dir2 import get_real_method |
|
26 | 26 | from ..lib import pretty |
|
27 | 27 | from traitlets import ( |
|
28 | 28 | Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, |
|
29 | 29 | ForwardDeclaredInstance, |
|
30 | 30 | default, observe, |
|
31 | 31 | ) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | class DisplayFormatter(Configurable): |
|
35 | 35 | |
|
36 | 36 | active_types = List(Unicode(), |
|
37 | 37 | help="""List of currently active mime-types to display. |
|
38 | 38 | You can use this to set a white-list for formats to display. |
|
39 | 39 | |
|
40 | 40 | Most users will not need to change this value. |
|
41 | 41 | """).tag(config=True) |
|
42 | 42 | |
|
43 | 43 | @default('active_types') |
|
44 | 44 | def _active_types_default(self): |
|
45 | 45 | return self.format_types |
|
46 | 46 | |
|
47 | 47 | @observe('active_types') |
|
48 | 48 | def _active_types_changed(self, change): |
|
49 | 49 | for key, formatter in self.formatters.items(): |
|
50 | 50 | if key in change['new']: |
|
51 | 51 | formatter.enabled = True |
|
52 | 52 | else: |
|
53 | 53 | formatter.enabled = False |
|
54 | 54 | |
|
55 | 55 | ipython_display_formatter = ForwardDeclaredInstance('FormatterABC') |
|
56 | 56 | @default('ipython_display_formatter') |
|
57 | 57 | def _default_formatter(self): |
|
58 | 58 | return IPythonDisplayFormatter(parent=self) |
|
59 | 59 | |
|
60 | 60 | mimebundle_formatter = ForwardDeclaredInstance('FormatterABC') |
|
61 | 61 | @default('mimebundle_formatter') |
|
62 | 62 | def _default_mime_formatter(self): |
|
63 | 63 | return MimeBundleFormatter(parent=self) |
|
64 | 64 | |
|
65 | 65 | # A dict of formatter whose keys are format types (MIME types) and whose |
|
66 | 66 | # values are subclasses of BaseFormatter. |
|
67 | 67 | formatters = Dict() |
|
68 | 68 | @default('formatters') |
|
69 | 69 | def _formatters_default(self): |
|
70 | 70 | """Activate the default formatters.""" |
|
71 | 71 | formatter_classes = [ |
|
72 | 72 | PlainTextFormatter, |
|
73 | 73 | HTMLFormatter, |
|
74 | 74 | MarkdownFormatter, |
|
75 | 75 | SVGFormatter, |
|
76 | 76 | PNGFormatter, |
|
77 | 77 | PDFFormatter, |
|
78 | 78 | JPEGFormatter, |
|
79 | 79 | LatexFormatter, |
|
80 | 80 | JSONFormatter, |
|
81 | 81 | JavascriptFormatter |
|
82 | 82 | ] |
|
83 | 83 | d = {} |
|
84 | 84 | for cls in formatter_classes: |
|
85 | 85 | f = cls(parent=self) |
|
86 | 86 | d[f.format_type] = f |
|
87 | 87 | return d |
|
88 | 88 | |
|
89 | 89 | def format(self, obj, include=None, exclude=None): |
|
90 | 90 | """Return a format data dict for an object. |
|
91 | 91 | |
|
92 | 92 | By default all format types will be computed. |
|
93 | 93 | |
|
94 | 94 | The following MIME types are usually implemented: |
|
95 | 95 | |
|
96 | 96 | * text/plain |
|
97 | 97 | * text/html |
|
98 | 98 | * text/markdown |
|
99 | 99 | * text/latex |
|
100 | 100 | * application/json |
|
101 | 101 | * application/javascript |
|
102 | 102 | * application/pdf |
|
103 | 103 | * image/png |
|
104 | 104 | * image/jpeg |
|
105 | 105 | * image/svg+xml |
|
106 | 106 | |
|
107 | 107 | Parameters |
|
108 | 108 | ---------- |
|
109 | 109 | obj : object |
|
110 | 110 | The Python object whose format data will be computed. |
|
111 | 111 | include : list, tuple or set; optional |
|
112 | 112 | A list of format type strings (MIME types) to include in the |
|
113 | 113 | format data dict. If this is set *only* the format types included |
|
114 | 114 | in this list will be computed. |
|
115 | 115 | exclude : list, tuple or set; optional |
|
116 | 116 | A list of format type string (MIME types) to exclude in the format |
|
117 | 117 | data dict. If this is set all format types will be computed, |
|
118 | 118 | except for those included in this argument. |
|
119 | 119 | Mimetypes present in exclude will take precedence over the ones in include |
|
120 | 120 | |
|
121 | 121 | Returns |
|
122 | 122 | ------- |
|
123 | 123 | (format_dict, metadata_dict) : tuple of two dicts |
|
124 | ||
|
125 | 124 | format_dict is a dictionary of key/value pairs, one of each format that was |
|
126 | 125 | generated for the object. The keys are the format types, which |
|
127 | 126 | will usually be MIME type strings and the values and JSON'able |
|
128 | 127 | data structure containing the raw data for the representation in |
|
129 | 128 | that format. |
|
130 | 129 | |
|
131 | 130 | metadata_dict is a dictionary of metadata about each mime-type output. |
|
132 | 131 | Its keys will be a strict subset of the keys in format_dict. |
|
133 | 132 | |
|
134 | 133 | Notes |
|
135 | 134 | ----- |
|
136 | ||
|
137 | 135 | If an object implement `_repr_mimebundle_` as well as various |
|
138 | 136 | `_repr_*_`, the data returned by `_repr_mimebundle_` will take |
|
139 | 137 | precedence and the corresponding `_repr_*_` for this mimetype will |
|
140 | 138 | not be called. |
|
141 | 139 | |
|
142 | 140 | """ |
|
143 | 141 | format_dict = {} |
|
144 | 142 | md_dict = {} |
|
145 | 143 | |
|
146 | 144 | if self.ipython_display_formatter(obj): |
|
147 | 145 | # object handled itself, don't proceed |
|
148 | 146 | return {}, {} |
|
149 | 147 | |
|
150 | 148 | format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude) |
|
151 | 149 | |
|
152 | 150 | if format_dict or md_dict: |
|
153 | 151 | if include: |
|
154 | 152 | format_dict = {k:v for k,v in format_dict.items() if k in include} |
|
155 | 153 | md_dict = {k:v for k,v in md_dict.items() if k in include} |
|
156 | 154 | if exclude: |
|
157 | 155 | format_dict = {k:v for k,v in format_dict.items() if k not in exclude} |
|
158 | 156 | md_dict = {k:v for k,v in md_dict.items() if k not in exclude} |
|
159 | 157 | |
|
160 | 158 | for format_type, formatter in self.formatters.items(): |
|
161 | 159 | if format_type in format_dict: |
|
162 | 160 | # already got it from mimebundle, maybe don't render again. |
|
163 | 161 | # exception: manually registered per-mime renderer |
|
164 | 162 | # check priority: |
|
165 | 163 | # 1. user-registered per-mime formatter |
|
166 | 164 | # 2. mime-bundle (user-registered or repr method) |
|
167 | 165 | # 3. default per-mime formatter (e.g. repr method) |
|
168 | 166 | try: |
|
169 | 167 | formatter.lookup(obj) |
|
170 | 168 | except KeyError: |
|
171 | 169 | # no special formatter, use mime-bundle-provided value |
|
172 | 170 | continue |
|
173 | 171 | if include and format_type not in include: |
|
174 | 172 | continue |
|
175 | 173 | if exclude and format_type in exclude: |
|
176 | 174 | continue |
|
177 | 175 | |
|
178 | 176 | md = None |
|
179 | 177 | try: |
|
180 | 178 | data = formatter(obj) |
|
181 | 179 | except: |
|
182 | 180 | # FIXME: log the exception |
|
183 | 181 | raise |
|
184 | 182 | |
|
185 | 183 | # formatters can return raw data or (data, metadata) |
|
186 | 184 | if isinstance(data, tuple) and len(data) == 2: |
|
187 | 185 | data, md = data |
|
188 | 186 | |
|
189 | 187 | if data is not None: |
|
190 | 188 | format_dict[format_type] = data |
|
191 | 189 | if md is not None: |
|
192 | 190 | md_dict[format_type] = md |
|
193 | 191 | return format_dict, md_dict |
|
194 | 192 | |
|
195 | 193 | @property |
|
196 | 194 | def format_types(self): |
|
197 | 195 | """Return the format types (MIME types) of the active formatters.""" |
|
198 | 196 | return list(self.formatters.keys()) |
|
199 | 197 | |
|
200 | 198 | |
|
201 | 199 | #----------------------------------------------------------------------------- |
|
202 | 200 | # Formatters for specific format types (text, html, svg, etc.) |
|
203 | 201 | #----------------------------------------------------------------------------- |
|
204 | 202 | |
|
205 | 203 | |
|
206 | 204 | def _safe_repr(obj): |
|
207 | 205 | """Try to return a repr of an object |
|
208 | 206 | |
|
209 | 207 | always returns a string, at least. |
|
210 | 208 | """ |
|
211 | 209 | try: |
|
212 | 210 | return repr(obj) |
|
213 | 211 | except Exception as e: |
|
214 | 212 | return "un-repr-able object (%r)" % e |
|
215 | 213 | |
|
216 | 214 | |
|
217 | 215 | class FormatterWarning(UserWarning): |
|
218 | 216 | """Warning class for errors in formatters""" |
|
219 | 217 | |
|
220 | 218 | @decorator |
|
221 | 219 | def catch_format_error(method, self, *args, **kwargs): |
|
222 | 220 | """show traceback on failed format call""" |
|
223 | 221 | try: |
|
224 | 222 | r = method(self, *args, **kwargs) |
|
225 | 223 | except NotImplementedError: |
|
226 | 224 | # don't warn on NotImplementedErrors |
|
227 | 225 | return self._check_return(None, args[0]) |
|
228 | 226 | except Exception: |
|
229 | 227 | exc_info = sys.exc_info() |
|
230 | 228 | ip = get_ipython() |
|
231 | 229 | if ip is not None: |
|
232 | 230 | ip.showtraceback(exc_info) |
|
233 | 231 | else: |
|
234 | 232 | traceback.print_exception(*exc_info) |
|
235 | 233 | return self._check_return(None, args[0]) |
|
236 | 234 | return self._check_return(r, args[0]) |
|
237 | 235 | |
|
238 | 236 | |
|
239 | 237 | class FormatterABC(metaclass=abc.ABCMeta): |
|
240 | 238 | """ Abstract base class for Formatters. |
|
241 | 239 | |
|
242 | 240 | A formatter is a callable class that is responsible for computing the |
|
243 | 241 | raw format data for a particular format type (MIME type). For example, |
|
244 | 242 | an HTML formatter would have a format type of `text/html` and would return |
|
245 | 243 | the HTML representation of the object when called. |
|
246 | 244 | """ |
|
247 | 245 | |
|
248 | 246 | # The format type of the data returned, usually a MIME type. |
|
249 | 247 | format_type = 'text/plain' |
|
250 | 248 | |
|
251 | 249 | # Is the formatter enabled... |
|
252 | 250 | enabled = True |
|
253 | 251 | |
|
254 | 252 | @abc.abstractmethod |
|
255 | 253 | def __call__(self, obj): |
|
256 | 254 | """Return a JSON'able representation of the object. |
|
257 | 255 | |
|
258 | 256 | If the object cannot be formatted by this formatter, |
|
259 | 257 | warn and return None. |
|
260 | 258 | """ |
|
261 | 259 | return repr(obj) |
|
262 | 260 | |
|
263 | 261 | |
|
264 | 262 | def _mod_name_key(typ): |
|
265 | 263 | """Return a (__module__, __name__) tuple for a type. |
|
266 | 264 | |
|
267 | 265 | Used as key in Formatter.deferred_printers. |
|
268 | 266 | """ |
|
269 | 267 | module = getattr(typ, '__module__', None) |
|
270 | 268 | name = getattr(typ, '__name__', None) |
|
271 | 269 | return (module, name) |
|
272 | 270 | |
|
273 | 271 | |
|
274 | 272 | def _get_type(obj): |
|
275 | 273 | """Return the type of an instance (old and new-style)""" |
|
276 | 274 | return getattr(obj, '__class__', None) or type(obj) |
|
277 | 275 | |
|
278 | 276 | |
|
279 | 277 | _raise_key_error = Sentinel('_raise_key_error', __name__, |
|
280 | 278 | """ |
|
281 | 279 | Special value to raise a KeyError |
|
282 | 280 | |
|
283 | 281 | Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop` |
|
284 | 282 | """) |
|
285 | 283 | |
|
286 | 284 | |
|
287 | 285 | class BaseFormatter(Configurable): |
|
288 | 286 | """A base formatter class that is configurable. |
|
289 | 287 | |
|
290 | 288 | This formatter should usually be used as the base class of all formatters. |
|
291 | 289 | It is a traited :class:`Configurable` class and includes an extensible |
|
292 | 290 | API for users to determine how their objects are formatted. The following |
|
293 | 291 | logic is used to find a function to format an given object. |
|
294 | 292 | |
|
295 | 293 | 1. The object is introspected to see if it has a method with the name |
|
296 | 294 | :attr:`print_method`. If is does, that object is passed to that method |
|
297 | 295 | for formatting. |
|
298 | 296 | 2. If no print method is found, three internal dictionaries are consulted |
|
299 | 297 | to find print method: :attr:`singleton_printers`, :attr:`type_printers` |
|
300 | 298 | and :attr:`deferred_printers`. |
|
301 | 299 | |
|
302 | 300 | Users should use these dictionaries to register functions that will be |
|
303 | 301 | used to compute the format data for their objects (if those objects don't |
|
304 | 302 | have the special print methods). The easiest way of using these |
|
305 | 303 | dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` |
|
306 | 304 | methods. |
|
307 | 305 | |
|
308 | 306 | If no function/callable is found to compute the format data, ``None`` is |
|
309 | 307 | returned and this format type is not used. |
|
310 | 308 | """ |
|
311 | 309 | |
|
312 | 310 | format_type = Unicode('text/plain') |
|
313 | 311 | _return_type = str |
|
314 | 312 | |
|
315 | 313 | enabled = Bool(True).tag(config=True) |
|
316 | 314 | |
|
317 | 315 | print_method = ObjectName('__repr__') |
|
318 | 316 | |
|
319 | 317 | # The singleton printers. |
|
320 | 318 | # Maps the IDs of the builtin singleton objects to the format functions. |
|
321 | 319 | singleton_printers = Dict().tag(config=True) |
|
322 | 320 | |
|
323 | 321 | # The type-specific printers. |
|
324 | 322 | # Map type objects to the format functions. |
|
325 | 323 | type_printers = Dict().tag(config=True) |
|
326 | 324 | |
|
327 | 325 | # The deferred-import type-specific printers. |
|
328 | 326 | # Map (modulename, classname) pairs to the format functions. |
|
329 | 327 | deferred_printers = Dict().tag(config=True) |
|
330 | 328 | |
|
331 | 329 | @catch_format_error |
|
332 | 330 | def __call__(self, obj): |
|
333 | 331 | """Compute the format for an object.""" |
|
334 | 332 | if self.enabled: |
|
335 | 333 | # lookup registered printer |
|
336 | 334 | try: |
|
337 | 335 | printer = self.lookup(obj) |
|
338 | 336 | except KeyError: |
|
339 | 337 | pass |
|
340 | 338 | else: |
|
341 | 339 | return printer(obj) |
|
342 | 340 | # Finally look for special method names |
|
343 | 341 | method = get_real_method(obj, self.print_method) |
|
344 | 342 | if method is not None: |
|
345 | 343 | return method() |
|
346 | 344 | return None |
|
347 | 345 | else: |
|
348 | 346 | return None |
|
349 | 347 | |
|
350 | 348 | def __contains__(self, typ): |
|
351 | 349 | """map in to lookup_by_type""" |
|
352 | 350 | try: |
|
353 | 351 | self.lookup_by_type(typ) |
|
354 | 352 | except KeyError: |
|
355 | 353 | return False |
|
356 | 354 | else: |
|
357 | 355 | return True |
|
358 | 356 | |
|
359 | 357 | def _check_return(self, r, obj): |
|
360 | 358 | """Check that a return value is appropriate |
|
361 | 359 | |
|
362 | 360 | Return the value if so, None otherwise, warning if invalid. |
|
363 | 361 | """ |
|
364 | 362 | if r is None or isinstance(r, self._return_type) or \ |
|
365 | 363 | (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): |
|
366 | 364 | return r |
|
367 | 365 | else: |
|
368 | 366 | warnings.warn( |
|
369 | 367 | "%s formatter returned invalid type %s (expected %s) for object: %s" % \ |
|
370 | 368 | (self.format_type, type(r), self._return_type, _safe_repr(obj)), |
|
371 | 369 | FormatterWarning |
|
372 | 370 | ) |
|
373 | 371 | |
|
374 | 372 | def lookup(self, obj): |
|
375 | 373 | """Look up the formatter for a given instance. |
|
376 | 374 | |
|
377 | 375 | Parameters |
|
378 | 376 | ---------- |
|
379 | 377 |
obj |
|
380 | 378 | |
|
381 | 379 | Returns |
|
382 | 380 | ------- |
|
383 | 381 | f : callable |
|
384 | 382 | The registered formatting callable for the type. |
|
385 | 383 | |
|
386 | 384 | Raises |
|
387 | 385 | ------ |
|
388 | 386 | KeyError if the type has not been registered. |
|
389 | 387 | """ |
|
390 | 388 | # look for singleton first |
|
391 | 389 | obj_id = id(obj) |
|
392 | 390 | if obj_id in self.singleton_printers: |
|
393 | 391 | return self.singleton_printers[obj_id] |
|
394 | 392 | # then lookup by type |
|
395 | 393 | return self.lookup_by_type(_get_type(obj)) |
|
396 | 394 | |
|
397 | 395 | def lookup_by_type(self, typ): |
|
398 | 396 | """Look up the registered formatter for a type. |
|
399 | 397 | |
|
400 | 398 | Parameters |
|
401 | 399 | ---------- |
|
402 | 400 |
typ |
|
403 | 401 | |
|
404 | 402 | Returns |
|
405 | 403 | ------- |
|
406 | 404 | f : callable |
|
407 | 405 | The registered formatting callable for the type. |
|
408 | 406 | |
|
409 | 407 | Raises |
|
410 | 408 | ------ |
|
411 | 409 | KeyError if the type has not been registered. |
|
412 | 410 | """ |
|
413 | 411 | if isinstance(typ, str): |
|
414 | 412 | typ_key = tuple(typ.rsplit('.',1)) |
|
415 | 413 | if typ_key not in self.deferred_printers: |
|
416 | 414 | # We may have it cached in the type map. We will have to |
|
417 | 415 | # iterate over all of the types to check. |
|
418 | 416 | for cls in self.type_printers: |
|
419 | 417 | if _mod_name_key(cls) == typ_key: |
|
420 | 418 | return self.type_printers[cls] |
|
421 | 419 | else: |
|
422 | 420 | return self.deferred_printers[typ_key] |
|
423 | 421 | else: |
|
424 | 422 | for cls in pretty._get_mro(typ): |
|
425 | 423 | if cls in self.type_printers or self._in_deferred_types(cls): |
|
426 | 424 | return self.type_printers[cls] |
|
427 | 425 | |
|
428 | 426 | # If we have reached here, the lookup failed. |
|
429 | 427 | raise KeyError("No registered printer for {0!r}".format(typ)) |
|
430 | 428 | |
|
431 | 429 | def for_type(self, typ, func=None): |
|
432 | 430 | """Add a format function for a given type. |
|
433 | 431 | |
|
434 | 432 | Parameters |
|
435 | 433 | ---------- |
|
436 | 434 | typ : type or '__module__.__name__' string for a type |
|
437 | 435 | The class of the object that will be formatted using `func`. |
|
438 | 436 | func : callable |
|
439 | 437 | A callable for computing the format data. |
|
440 | 438 | `func` will be called with the object to be formatted, |
|
441 | 439 | and will return the raw data in this formatter's format. |
|
442 | 440 | Subclasses may use a different call signature for the |
|
443 | 441 | `func` argument. |
|
444 | 442 | |
|
445 | 443 | If `func` is None or not specified, there will be no change, |
|
446 | 444 | only returning the current value. |
|
447 | 445 | |
|
448 | 446 | Returns |
|
449 | 447 | ------- |
|
450 | 448 | oldfunc : callable |
|
451 | 449 | The currently registered callable. |
|
452 | 450 | If you are registering a new formatter, |
|
453 | 451 | this will be the previous value (to enable restoring later). |
|
454 | 452 | """ |
|
455 | 453 | # if string given, interpret as 'pkg.module.class_name' |
|
456 | 454 | if isinstance(typ, str): |
|
457 | 455 | type_module, type_name = typ.rsplit('.', 1) |
|
458 | 456 | return self.for_type_by_name(type_module, type_name, func) |
|
459 | 457 | |
|
460 | 458 | try: |
|
461 | 459 | oldfunc = self.lookup_by_type(typ) |
|
462 | 460 | except KeyError: |
|
463 | 461 | oldfunc = None |
|
464 | 462 | |
|
465 | 463 | if func is not None: |
|
466 | 464 | self.type_printers[typ] = func |
|
467 | 465 | |
|
468 | 466 | return oldfunc |
|
469 | 467 | |
|
470 | 468 | def for_type_by_name(self, type_module, type_name, func=None): |
|
471 | 469 | """Add a format function for a type specified by the full dotted |
|
472 | 470 | module and name of the type, rather than the type of the object. |
|
473 | 471 | |
|
474 | 472 | Parameters |
|
475 | 473 | ---------- |
|
476 | 474 | type_module : str |
|
477 | 475 | The full dotted name of the module the type is defined in, like |
|
478 | 476 | ``numpy``. |
|
479 | 477 | type_name : str |
|
480 | 478 | The name of the type (the class name), like ``dtype`` |
|
481 | 479 | func : callable |
|
482 | 480 | A callable for computing the format data. |
|
483 | 481 | `func` will be called with the object to be formatted, |
|
484 | 482 | and will return the raw data in this formatter's format. |
|
485 | 483 | Subclasses may use a different call signature for the |
|
486 | 484 | `func` argument. |
|
487 | 485 | |
|
488 | 486 | If `func` is None or unspecified, there will be no change, |
|
489 | 487 | only returning the current value. |
|
490 | 488 | |
|
491 | 489 | Returns |
|
492 | 490 | ------- |
|
493 | 491 | oldfunc : callable |
|
494 | 492 | The currently registered callable. |
|
495 | 493 | If you are registering a new formatter, |
|
496 | 494 | this will be the previous value (to enable restoring later). |
|
497 | 495 | """ |
|
498 | 496 | key = (type_module, type_name) |
|
499 | 497 | |
|
500 | 498 | try: |
|
501 | 499 | oldfunc = self.lookup_by_type("%s.%s" % key) |
|
502 | 500 | except KeyError: |
|
503 | 501 | oldfunc = None |
|
504 | 502 | |
|
505 | 503 | if func is not None: |
|
506 | 504 | self.deferred_printers[key] = func |
|
507 | 505 | return oldfunc |
|
508 | 506 | |
|
509 | 507 | def pop(self, typ, default=_raise_key_error): |
|
510 | 508 | """Pop a formatter for the given type. |
|
511 | 509 | |
|
512 | 510 | Parameters |
|
513 | 511 | ---------- |
|
514 | 512 | typ : type or '__module__.__name__' string for a type |
|
515 | 513 | default : object |
|
516 | 514 | value to be returned if no formatter is registered for typ. |
|
517 | 515 | |
|
518 | 516 | Returns |
|
519 | 517 | ------- |
|
520 | 518 | obj : object |
|
521 | 519 | The last registered object for the type. |
|
522 | 520 | |
|
523 | 521 | Raises |
|
524 | 522 | ------ |
|
525 | 523 | KeyError if the type is not registered and default is not specified. |
|
526 | 524 | """ |
|
527 | 525 | |
|
528 | 526 | if isinstance(typ, str): |
|
529 | 527 | typ_key = tuple(typ.rsplit('.',1)) |
|
530 | 528 | if typ_key not in self.deferred_printers: |
|
531 | 529 | # We may have it cached in the type map. We will have to |
|
532 | 530 | # iterate over all of the types to check. |
|
533 | 531 | for cls in self.type_printers: |
|
534 | 532 | if _mod_name_key(cls) == typ_key: |
|
535 | 533 | old = self.type_printers.pop(cls) |
|
536 | 534 | break |
|
537 | 535 | else: |
|
538 | 536 | old = default |
|
539 | 537 | else: |
|
540 | 538 | old = self.deferred_printers.pop(typ_key) |
|
541 | 539 | else: |
|
542 | 540 | if typ in self.type_printers: |
|
543 | 541 | old = self.type_printers.pop(typ) |
|
544 | 542 | else: |
|
545 | 543 | old = self.deferred_printers.pop(_mod_name_key(typ), default) |
|
546 | 544 | if old is _raise_key_error: |
|
547 | 545 | raise KeyError("No registered value for {0!r}".format(typ)) |
|
548 | 546 | return old |
|
549 | 547 | |
|
550 | 548 | def _in_deferred_types(self, cls): |
|
551 | 549 | """ |
|
552 | 550 | Check if the given class is specified in the deferred type registry. |
|
553 | 551 | |
|
554 | 552 | Successful matches will be moved to the regular type registry for future use. |
|
555 | 553 | """ |
|
556 | 554 | mod = getattr(cls, '__module__', None) |
|
557 | 555 | name = getattr(cls, '__name__', None) |
|
558 | 556 | key = (mod, name) |
|
559 | 557 | if key in self.deferred_printers: |
|
560 | 558 | # Move the printer over to the regular registry. |
|
561 | 559 | printer = self.deferred_printers.pop(key) |
|
562 | 560 | self.type_printers[cls] = printer |
|
563 | 561 | return True |
|
564 | 562 | return False |
|
565 | 563 | |
|
566 | 564 | |
|
567 | 565 | class PlainTextFormatter(BaseFormatter): |
|
568 | 566 | """The default pretty-printer. |
|
569 | 567 | |
|
570 | 568 | This uses :mod:`IPython.lib.pretty` to compute the format data of |
|
571 | 569 | the object. If the object cannot be pretty printed, :func:`repr` is used. |
|
572 | 570 | See the documentation of :mod:`IPython.lib.pretty` for details on |
|
573 | 571 | how to write pretty printers. Here is a simple example:: |
|
574 | 572 | |
|
575 | 573 | def dtype_pprinter(obj, p, cycle): |
|
576 | 574 | if cycle: |
|
577 | 575 | return p.text('dtype(...)') |
|
578 | 576 | if hasattr(obj, 'fields'): |
|
579 | 577 | if obj.fields is None: |
|
580 | 578 | p.text(repr(obj)) |
|
581 | 579 | else: |
|
582 | 580 | p.begin_group(7, 'dtype([') |
|
583 | 581 | for i, field in enumerate(obj.descr): |
|
584 | 582 | if i > 0: |
|
585 | 583 | p.text(',') |
|
586 | 584 | p.breakable() |
|
587 | 585 | p.pretty(field) |
|
588 | 586 | p.end_group(7, '])') |
|
589 | 587 | """ |
|
590 | 588 | |
|
591 | 589 | # The format type of data returned. |
|
592 | 590 | format_type = Unicode('text/plain') |
|
593 | 591 | |
|
594 | 592 | # This subclass ignores this attribute as it always need to return |
|
595 | 593 | # something. |
|
596 | 594 | enabled = Bool(True).tag(config=False) |
|
597 | 595 | |
|
598 | 596 | max_seq_length = Integer(pretty.MAX_SEQ_LENGTH, |
|
599 | 597 | help="""Truncate large collections (lists, dicts, tuples, sets) to this size. |
|
600 | 598 | |
|
601 | 599 | Set to 0 to disable truncation. |
|
602 | 600 | """ |
|
603 | 601 | ).tag(config=True) |
|
604 | 602 | |
|
605 | 603 | # Look for a _repr_pretty_ methods to use for pretty printing. |
|
606 | 604 | print_method = ObjectName('_repr_pretty_') |
|
607 | 605 | |
|
608 | 606 | # Whether to pretty-print or not. |
|
609 | 607 | pprint = Bool(True).tag(config=True) |
|
610 | 608 | |
|
611 | 609 | # Whether to be verbose or not. |
|
612 | 610 | verbose = Bool(False).tag(config=True) |
|
613 | 611 | |
|
614 | 612 | # The maximum width. |
|
615 | 613 | max_width = Integer(79).tag(config=True) |
|
616 | 614 | |
|
617 | 615 | # The newline character. |
|
618 | 616 | newline = Unicode('\n').tag(config=True) |
|
619 | 617 | |
|
620 | 618 | # format-string for pprinting floats |
|
621 | 619 | float_format = Unicode('%r') |
|
622 | 620 | # setter for float precision, either int or direct format-string |
|
623 | 621 | float_precision = CUnicode('').tag(config=True) |
|
624 | 622 | |
|
625 | 623 | @observe('float_precision') |
|
626 | 624 | def _float_precision_changed(self, change): |
|
627 | 625 | """float_precision changed, set float_format accordingly. |
|
628 | 626 | |
|
629 | 627 | float_precision can be set by int or str. |
|
630 | 628 | This will set float_format, after interpreting input. |
|
631 | 629 | If numpy has been imported, numpy print precision will also be set. |
|
632 | 630 | |
|
633 | 631 | integer `n` sets format to '%.nf', otherwise, format set directly. |
|
634 | 632 | |
|
635 | 633 | An empty string returns to defaults (repr for float, 8 for numpy). |
|
636 | 634 | |
|
637 | 635 | This parameter can be set via the '%precision' magic. |
|
638 | 636 | """ |
|
639 | ||
|
640 | 637 | new = change['new'] |
|
641 | 638 | if '%' in new: |
|
642 | 639 | # got explicit format string |
|
643 | 640 | fmt = new |
|
644 | 641 | try: |
|
645 | 642 | fmt%3.14159 |
|
646 | 643 | except Exception as e: |
|
647 | 644 | raise ValueError("Precision must be int or format string, not %r"%new) from e |
|
648 | 645 | elif new: |
|
649 | 646 | # otherwise, should be an int |
|
650 | 647 | try: |
|
651 | 648 | i = int(new) |
|
652 | 649 | assert i >= 0 |
|
653 | 650 | except ValueError as e: |
|
654 | 651 | raise ValueError("Precision must be int or format string, not %r"%new) from e |
|
655 | 652 | except AssertionError as e: |
|
656 | 653 | raise ValueError("int precision must be non-negative, not %r"%i) from e |
|
657 | 654 | |
|
658 | 655 | fmt = '%%.%if'%i |
|
659 | 656 | if 'numpy' in sys.modules: |
|
660 | 657 | # set numpy precision if it has been imported |
|
661 | 658 | import numpy |
|
662 | 659 | numpy.set_printoptions(precision=i) |
|
663 | 660 | else: |
|
664 | 661 | # default back to repr |
|
665 | 662 | fmt = '%r' |
|
666 | 663 | if 'numpy' in sys.modules: |
|
667 | 664 | import numpy |
|
668 | 665 | # numpy default is 8 |
|
669 | 666 | numpy.set_printoptions(precision=8) |
|
670 | 667 | self.float_format = fmt |
|
671 | 668 | |
|
672 | 669 | # Use the default pretty printers from IPython.lib.pretty. |
|
673 | 670 | @default('singleton_printers') |
|
674 | 671 | def _singleton_printers_default(self): |
|
675 | 672 | return pretty._singleton_pprinters.copy() |
|
676 | 673 | |
|
677 | 674 | @default('type_printers') |
|
678 | 675 | def _type_printers_default(self): |
|
679 | 676 | d = pretty._type_pprinters.copy() |
|
680 | 677 | d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) |
|
678 | # if NumPy is used, set precision for its float64 type | |
|
679 | if "numpy" in sys.modules: | |
|
680 | import numpy | |
|
681 | ||
|
682 | d[numpy.float64] = lambda obj, p, cycle: p.text(self.float_format % obj) | |
|
681 | 683 | return d |
|
682 | 684 | |
|
683 | 685 | @default('deferred_printers') |
|
684 | 686 | def _deferred_printers_default(self): |
|
685 | 687 | return pretty._deferred_type_pprinters.copy() |
|
686 | 688 | |
|
687 | 689 | #### FormatterABC interface #### |
|
688 | 690 | |
|
689 | 691 | @catch_format_error |
|
690 | 692 | def __call__(self, obj): |
|
691 | 693 | """Compute the pretty representation of the object.""" |
|
692 | 694 | if not self.pprint: |
|
693 | 695 | return repr(obj) |
|
694 | 696 | else: |
|
695 | 697 | stream = StringIO() |
|
696 | 698 | printer = pretty.RepresentationPrinter(stream, self.verbose, |
|
697 | 699 | self.max_width, self.newline, |
|
698 | 700 | max_seq_length=self.max_seq_length, |
|
699 | 701 | singleton_pprinters=self.singleton_printers, |
|
700 | 702 | type_pprinters=self.type_printers, |
|
701 | 703 | deferred_pprinters=self.deferred_printers) |
|
702 | 704 | printer.pretty(obj) |
|
703 | 705 | printer.flush() |
|
704 | 706 | return stream.getvalue() |
|
705 | 707 | |
|
706 | 708 | |
|
707 | 709 | class HTMLFormatter(BaseFormatter): |
|
708 | 710 | """An HTML formatter. |
|
709 | 711 | |
|
710 | 712 | To define the callables that compute the HTML representation of your |
|
711 | 713 | objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` |
|
712 | 714 | or :meth:`for_type_by_name` methods to register functions that handle |
|
713 | 715 | this. |
|
714 | 716 | |
|
715 | 717 | The return value of this formatter should be a valid HTML snippet that |
|
716 | 718 | could be injected into an existing DOM. It should *not* include the |
|
717 | 719 | ```<html>`` or ```<body>`` tags. |
|
718 | 720 | """ |
|
719 | 721 | format_type = Unicode('text/html') |
|
720 | 722 | |
|
721 | 723 | print_method = ObjectName('_repr_html_') |
|
722 | 724 | |
|
723 | 725 | |
|
724 | 726 | class MarkdownFormatter(BaseFormatter): |
|
725 | 727 | """A Markdown formatter. |
|
726 | 728 | |
|
727 | 729 | To define the callables that compute the Markdown representation of your |
|
728 | 730 | objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` |
|
729 | 731 | or :meth:`for_type_by_name` methods to register functions that handle |
|
730 | 732 | this. |
|
731 | 733 | |
|
732 | 734 | The return value of this formatter should be a valid Markdown. |
|
733 | 735 | """ |
|
734 | 736 | format_type = Unicode('text/markdown') |
|
735 | 737 | |
|
736 | 738 | print_method = ObjectName('_repr_markdown_') |
|
737 | 739 | |
|
738 | 740 | class SVGFormatter(BaseFormatter): |
|
739 | 741 | """An SVG formatter. |
|
740 | 742 | |
|
741 | 743 | To define the callables that compute the SVG representation of your |
|
742 | 744 | objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` |
|
743 | 745 | or :meth:`for_type_by_name` methods to register functions that handle |
|
744 | 746 | this. |
|
745 | 747 | |
|
746 | 748 | The return value of this formatter should be valid SVG enclosed in |
|
747 | 749 | ```<svg>``` tags, that could be injected into an existing DOM. It should |
|
748 | 750 | *not* include the ```<html>`` or ```<body>`` tags. |
|
749 | 751 | """ |
|
750 | 752 | format_type = Unicode('image/svg+xml') |
|
751 | 753 | |
|
752 | 754 | print_method = ObjectName('_repr_svg_') |
|
753 | 755 | |
|
754 | 756 | |
|
755 | 757 | class PNGFormatter(BaseFormatter): |
|
756 | 758 | """A PNG formatter. |
|
757 | 759 | |
|
758 | 760 | To define the callables that compute the PNG representation of your |
|
759 | 761 | objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` |
|
760 | 762 | or :meth:`for_type_by_name` methods to register functions that handle |
|
761 | 763 | this. |
|
762 | 764 | |
|
763 | 765 | The return value of this formatter should be raw PNG data, *not* |
|
764 | 766 | base64 encoded. |
|
765 | 767 | """ |
|
766 | 768 | format_type = Unicode('image/png') |
|
767 | 769 | |
|
768 | 770 | print_method = ObjectName('_repr_png_') |
|
769 | 771 | |
|
770 | 772 | _return_type = (bytes, str) |
|
771 | 773 | |
|
772 | 774 | |
|
773 | 775 | class JPEGFormatter(BaseFormatter): |
|
774 | 776 | """A JPEG formatter. |
|
775 | 777 | |
|
776 | 778 | To define the callables that compute the JPEG representation of your |
|
777 | 779 | objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` |
|
778 | 780 | or :meth:`for_type_by_name` methods to register functions that handle |
|
779 | 781 | this. |
|
780 | 782 | |
|
781 | 783 | The return value of this formatter should be raw JPEG data, *not* |
|
782 | 784 | base64 encoded. |
|
783 | 785 | """ |
|
784 | 786 | format_type = Unicode('image/jpeg') |
|
785 | 787 | |
|
786 | 788 | print_method = ObjectName('_repr_jpeg_') |
|
787 | 789 | |
|
788 | 790 | _return_type = (bytes, str) |
|
789 | 791 | |
|
790 | 792 | |
|
791 | 793 | class LatexFormatter(BaseFormatter): |
|
792 | 794 | """A LaTeX formatter. |
|
793 | 795 | |
|
794 | 796 | To define the callables that compute the LaTeX representation of your |
|
795 | 797 | objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` |
|
796 | 798 | or :meth:`for_type_by_name` methods to register functions that handle |
|
797 | 799 | this. |
|
798 | 800 | |
|
799 | 801 | The return value of this formatter should be a valid LaTeX equation, |
|
800 | 802 | enclosed in either ```$```, ```$$``` or another LaTeX equation |
|
801 | 803 | environment. |
|
802 | 804 | """ |
|
803 | 805 | format_type = Unicode('text/latex') |
|
804 | 806 | |
|
805 | 807 | print_method = ObjectName('_repr_latex_') |
|
806 | 808 | |
|
807 | 809 | |
|
808 | 810 | class JSONFormatter(BaseFormatter): |
|
809 | 811 | """A JSON string formatter. |
|
810 | 812 | |
|
811 | 813 | To define the callables that compute the JSONable representation of |
|
812 | 814 | your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` |
|
813 | 815 | or :meth:`for_type_by_name` methods to register functions that handle |
|
814 | 816 | this. |
|
815 | 817 | |
|
816 | 818 | The return value of this formatter should be a JSONable list or dict. |
|
817 | 819 | JSON scalars (None, number, string) are not allowed, only dict or list containers. |
|
818 | 820 | """ |
|
819 | 821 | format_type = Unicode('application/json') |
|
820 | 822 | _return_type = (list, dict) |
|
821 | 823 | |
|
822 | 824 | print_method = ObjectName('_repr_json_') |
|
823 | 825 | |
|
824 | 826 | def _check_return(self, r, obj): |
|
825 | 827 | """Check that a return value is appropriate |
|
826 | 828 | |
|
827 | 829 | Return the value if so, None otherwise, warning if invalid. |
|
828 | 830 | """ |
|
829 | 831 | if r is None: |
|
830 | 832 | return |
|
831 | 833 | md = None |
|
832 | 834 | if isinstance(r, tuple): |
|
833 | 835 | # unpack data, metadata tuple for type checking on first element |
|
834 | 836 | r, md = r |
|
835 | 837 | |
|
836 | 838 | # handle deprecated JSON-as-string form from IPython < 3 |
|
837 | 839 | if isinstance(r, str): |
|
838 | 840 | warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", |
|
839 | 841 | FormatterWarning) |
|
840 | 842 | r = json.loads(r) |
|
841 | 843 | |
|
842 | 844 | if md is not None: |
|
843 | 845 | # put the tuple back together |
|
844 | 846 | r = (r, md) |
|
845 | 847 | return super(JSONFormatter, self)._check_return(r, obj) |
|
846 | 848 | |
|
847 | 849 | |
|
848 | 850 | class JavascriptFormatter(BaseFormatter): |
|
849 | 851 | """A Javascript formatter. |
|
850 | 852 | |
|
851 | 853 | To define the callables that compute the Javascript representation of |
|
852 | 854 | your objects, define a :meth:`_repr_javascript_` method or use the |
|
853 | 855 | :meth:`for_type` or :meth:`for_type_by_name` methods to register functions |
|
854 | 856 | that handle this. |
|
855 | 857 | |
|
856 | 858 | The return value of this formatter should be valid Javascript code and |
|
857 | 859 | should *not* be enclosed in ```<script>``` tags. |
|
858 | 860 | """ |
|
859 | 861 | format_type = Unicode('application/javascript') |
|
860 | 862 | |
|
861 | 863 | print_method = ObjectName('_repr_javascript_') |
|
862 | 864 | |
|
863 | 865 | |
|
864 | 866 | class PDFFormatter(BaseFormatter): |
|
865 | 867 | """A PDF formatter. |
|
866 | 868 | |
|
867 | 869 | To define the callables that compute the PDF representation of your |
|
868 | 870 | objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` |
|
869 | 871 | or :meth:`for_type_by_name` methods to register functions that handle |
|
870 | 872 | this. |
|
871 | 873 | |
|
872 | 874 | The return value of this formatter should be raw PDF data, *not* |
|
873 | 875 | base64 encoded. |
|
874 | 876 | """ |
|
875 | 877 | format_type = Unicode('application/pdf') |
|
876 | 878 | |
|
877 | 879 | print_method = ObjectName('_repr_pdf_') |
|
878 | 880 | |
|
879 | 881 | _return_type = (bytes, str) |
|
880 | 882 | |
|
881 | 883 | class IPythonDisplayFormatter(BaseFormatter): |
|
882 | 884 | """An escape-hatch Formatter for objects that know how to display themselves. |
|
883 | 885 | |
|
884 | 886 | To define the callables that compute the representation of your |
|
885 | 887 | objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type` |
|
886 | 888 | or :meth:`for_type_by_name` methods to register functions that handle |
|
887 | 889 | this. Unlike mime-type displays, this method should not return anything, |
|
888 | 890 | instead calling any appropriate display methods itself. |
|
889 | 891 | |
|
890 | 892 | This display formatter has highest priority. |
|
891 | 893 | If it fires, no other display formatter will be called. |
|
892 | 894 | |
|
893 | 895 | Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types |
|
894 | 896 | without registering a new Formatter. |
|
895 | 897 | |
|
896 | 898 | IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types, |
|
897 | 899 | so `_ipython_display_` should only be used for objects that require unusual |
|
898 | 900 | display patterns, such as multiple display calls. |
|
899 | 901 | """ |
|
900 | 902 | print_method = ObjectName('_ipython_display_') |
|
901 | 903 | _return_type = (type(None), bool) |
|
902 | 904 | |
|
903 | 905 | @catch_format_error |
|
904 | 906 | def __call__(self, obj): |
|
905 | 907 | """Compute the format for an object.""" |
|
906 | 908 | if self.enabled: |
|
907 | 909 | # lookup registered printer |
|
908 | 910 | try: |
|
909 | 911 | printer = self.lookup(obj) |
|
910 | 912 | except KeyError: |
|
911 | 913 | pass |
|
912 | 914 | else: |
|
913 | 915 | printer(obj) |
|
914 | 916 | return True |
|
915 | 917 | # Finally look for special method names |
|
916 | 918 | method = get_real_method(obj, self.print_method) |
|
917 | 919 | if method is not None: |
|
918 | 920 | method() |
|
919 | 921 | return True |
|
920 | 922 | |
|
921 | 923 | |
|
922 | 924 | class MimeBundleFormatter(BaseFormatter): |
|
923 | 925 | """A Formatter for arbitrary mime-types. |
|
924 | 926 | |
|
925 | 927 | Unlike other `_repr_<mimetype>_` methods, |
|
926 | 928 | `_repr_mimebundle_` should return mime-bundle data, |
|
927 | 929 | either the mime-keyed `data` dictionary or the tuple `(data, metadata)`. |
|
928 | 930 | Any mime-type is valid. |
|
929 | 931 | |
|
930 | 932 | To define the callables that compute the mime-bundle representation of your |
|
931 | 933 | objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type` |
|
932 | 934 | or :meth:`for_type_by_name` methods to register functions that handle |
|
933 | 935 | this. |
|
934 | 936 | |
|
935 | 937 | .. versionadded:: 6.1 |
|
936 | 938 | """ |
|
937 | 939 | print_method = ObjectName('_repr_mimebundle_') |
|
938 | 940 | _return_type = dict |
|
939 | 941 | |
|
940 | 942 | def _check_return(self, r, obj): |
|
941 | 943 | r = super(MimeBundleFormatter, self)._check_return(r, obj) |
|
942 | 944 | # always return (data, metadata): |
|
943 | 945 | if r is None: |
|
944 | 946 | return {}, {} |
|
945 | 947 | if not isinstance(r, tuple): |
|
946 | 948 | return r, {} |
|
947 | 949 | return r |
|
948 | 950 | |
|
949 | 951 | @catch_format_error |
|
950 | 952 | def __call__(self, obj, include=None, exclude=None): |
|
951 | 953 | """Compute the format for an object. |
|
952 | 954 | |
|
953 | 955 | Identical to parent's method but we pass extra parameters to the method. |
|
954 | 956 | |
|
955 | 957 | Unlike other _repr_*_ `_repr_mimebundle_` should allow extra kwargs, in |
|
956 | 958 | particular `include` and `exclude`. |
|
957 | 959 | """ |
|
958 | 960 | if self.enabled: |
|
959 | 961 | # lookup registered printer |
|
960 | 962 | try: |
|
961 | 963 | printer = self.lookup(obj) |
|
962 | 964 | except KeyError: |
|
963 | 965 | pass |
|
964 | 966 | else: |
|
965 | 967 | return printer(obj) |
|
966 | 968 | # Finally look for special method names |
|
967 | 969 | method = get_real_method(obj, self.print_method) |
|
968 | 970 | |
|
969 | 971 | if method is not None: |
|
970 | 972 | return method(include=include, exclude=exclude) |
|
971 | 973 | return None |
|
972 | 974 | else: |
|
973 | 975 | return None |
|
974 | 976 | |
|
975 | 977 | |
|
976 | 978 | FormatterABC.register(BaseFormatter) |
|
977 | 979 | FormatterABC.register(PlainTextFormatter) |
|
978 | 980 | FormatterABC.register(HTMLFormatter) |
|
979 | 981 | FormatterABC.register(MarkdownFormatter) |
|
980 | 982 | FormatterABC.register(SVGFormatter) |
|
981 | 983 | FormatterABC.register(PNGFormatter) |
|
982 | 984 | FormatterABC.register(PDFFormatter) |
|
983 | 985 | FormatterABC.register(JPEGFormatter) |
|
984 | 986 | FormatterABC.register(LatexFormatter) |
|
985 | 987 | FormatterABC.register(JSONFormatter) |
|
986 | 988 | FormatterABC.register(JavascriptFormatter) |
|
987 | 989 | FormatterABC.register(IPythonDisplayFormatter) |
|
988 | 990 | FormatterABC.register(MimeBundleFormatter) |
|
989 | 991 | |
|
990 | 992 | |
|
991 | 993 | def format_display_data(obj, include=None, exclude=None): |
|
992 | 994 | """Return a format data dict for an object. |
|
993 | 995 | |
|
994 | 996 | By default all format types will be computed. |
|
995 | 997 | |
|
996 | 998 | Parameters |
|
997 | 999 | ---------- |
|
998 | 1000 | obj : object |
|
999 | 1001 | The Python object whose format data will be computed. |
|
1000 | 1002 | |
|
1001 | 1003 | Returns |
|
1002 | 1004 | ------- |
|
1003 | 1005 | format_dict : dict |
|
1004 | 1006 | A dictionary of key/value pairs, one or each format that was |
|
1005 | 1007 | generated for the object. The keys are the format types, which |
|
1006 | 1008 | will usually be MIME type strings and the values and JSON'able |
|
1007 | 1009 | data structure containing the raw data for the representation in |
|
1008 | 1010 | that format. |
|
1009 | 1011 | include : list or tuple, optional |
|
1010 | 1012 | A list of format type strings (MIME types) to include in the |
|
1011 | 1013 | format data dict. If this is set *only* the format types included |
|
1012 | 1014 | in this list will be computed. |
|
1013 | 1015 | exclude : list or tuple, optional |
|
1014 | 1016 | A list of format type string (MIME types) to exclude in the format |
|
1015 | 1017 | data dict. If this is set all format types will be computed, |
|
1016 | 1018 | except for those included in this argument. |
|
1017 | 1019 | """ |
|
1018 | 1020 | from .interactiveshell import InteractiveShell |
|
1019 | 1021 | |
|
1020 | 1022 | return InteractiveShell.instance().display_formatter.format( |
|
1021 | 1023 | obj, |
|
1022 | 1024 | include, |
|
1023 | 1025 | exclude |
|
1024 | 1026 | ) |
@@ -1,897 +1,907 b'' | |||
|
1 | 1 | """ History related magics and functionality """ |
|
2 | 2 | |
|
3 | 3 | # Copyright (c) IPython Development Team. |
|
4 | 4 | # Distributed under the terms of the Modified BSD License. |
|
5 | 5 | |
|
6 | 6 | |
|
7 | 7 | import atexit |
|
8 | 8 | import datetime |
|
9 | 9 | from pathlib import Path |
|
10 | 10 | import re |
|
11 | 11 | import sqlite3 |
|
12 | 12 | import threading |
|
13 | 13 | |
|
14 | 14 | from traitlets.config.configurable import LoggingConfigurable |
|
15 | 15 | from decorator import decorator |
|
16 | 16 | from IPython.utils.decorators import undoc |
|
17 | 17 | from IPython.paths import locate_profile |
|
18 | 18 | from traitlets import ( |
|
19 | 19 | Any, |
|
20 | 20 | Bool, |
|
21 | 21 | Dict, |
|
22 | 22 | Instance, |
|
23 | 23 | Integer, |
|
24 | 24 | List, |
|
25 | 25 | Unicode, |
|
26 | 26 | Union, |
|
27 | 27 | TraitError, |
|
28 | 28 | default, |
|
29 | 29 | observe, |
|
30 | 30 | ) |
|
31 | 31 | |
|
32 | 32 | #----------------------------------------------------------------------------- |
|
33 | 33 | # Classes and functions |
|
34 | 34 | #----------------------------------------------------------------------------- |
|
35 | 35 | |
|
36 | 36 | @undoc |
|
37 | 37 | class DummyDB(object): |
|
38 | 38 | """Dummy DB that will act as a black hole for history. |
|
39 | 39 | |
|
40 | 40 | Only used in the absence of sqlite""" |
|
41 | 41 | def execute(*args, **kwargs): |
|
42 | 42 | return [] |
|
43 | 43 | |
|
44 | 44 | def commit(self, *args, **kwargs): |
|
45 | 45 | pass |
|
46 | 46 | |
|
47 | 47 | def __enter__(self, *args, **kwargs): |
|
48 | 48 | pass |
|
49 | 49 | |
|
50 | 50 | def __exit__(self, *args, **kwargs): |
|
51 | 51 | pass |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | @decorator |
|
55 | 55 | def only_when_enabled(f, self, *a, **kw): |
|
56 | 56 | """Decorator: return an empty list in the absence of sqlite.""" |
|
57 | 57 | if not self.enabled: |
|
58 | 58 | return [] |
|
59 | 59 | else: |
|
60 | 60 | return f(self, *a, **kw) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | # use 16kB as threshold for whether a corrupt history db should be saved |
|
64 | 64 | # that should be at least 100 entries or so |
|
65 | 65 | _SAVE_DB_SIZE = 16384 |
|
66 | 66 | |
|
67 | 67 | @decorator |
|
68 | 68 | def catch_corrupt_db(f, self, *a, **kw): |
|
69 | 69 | """A decorator which wraps HistoryAccessor method calls to catch errors from |
|
70 | 70 | a corrupt SQLite database, move the old database out of the way, and create |
|
71 | 71 | a new one. |
|
72 | 72 | |
|
73 | 73 | We avoid clobbering larger databases because this may be triggered due to filesystem issues, |
|
74 | 74 | not just a corrupt file. |
|
75 | 75 | """ |
|
76 | 76 | try: |
|
77 | 77 | return f(self, *a, **kw) |
|
78 | 78 | except (sqlite3.DatabaseError, sqlite3.OperationalError) as e: |
|
79 | 79 | self._corrupt_db_counter += 1 |
|
80 | 80 | self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e) |
|
81 | 81 | if self.hist_file != ':memory:': |
|
82 | 82 | if self._corrupt_db_counter > self._corrupt_db_limit: |
|
83 | 83 | self.hist_file = ':memory:' |
|
84 | 84 | self.log.error("Failed to load history too many times, history will not be saved.") |
|
85 | 85 | elif self.hist_file.is_file(): |
|
86 | 86 | # move the file out of the way |
|
87 | 87 | base = str(self.hist_file.parent / self.hist_file.stem) |
|
88 | 88 | ext = self.hist_file.suffix |
|
89 | 89 | size = self.hist_file.stat().st_size |
|
90 | 90 | if size >= _SAVE_DB_SIZE: |
|
91 | 91 | # if there's significant content, avoid clobbering |
|
92 | 92 | now = datetime.datetime.now().isoformat().replace(':', '.') |
|
93 | 93 | newpath = base + '-corrupt-' + now + ext |
|
94 | 94 | # don't clobber previous corrupt backups |
|
95 | 95 | for i in range(100): |
|
96 | 96 | if not Path(newpath).exists(): |
|
97 | 97 | break |
|
98 | 98 | else: |
|
99 | 99 | newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext |
|
100 | 100 | else: |
|
101 | 101 | # not much content, possibly empty; don't worry about clobbering |
|
102 | 102 | # maybe we should just delete it? |
|
103 | 103 | newpath = base + '-corrupt' + ext |
|
104 | 104 | self.hist_file.rename(newpath) |
|
105 | 105 | self.log.error("History file was moved to %s and a new file created.", newpath) |
|
106 | 106 | self.init_db() |
|
107 | 107 | return [] |
|
108 | 108 | else: |
|
109 | 109 | # Failed with :memory:, something serious is wrong |
|
110 | 110 | raise |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | class HistoryAccessorBase(LoggingConfigurable): |
|
114 | 114 | """An abstract class for History Accessors """ |
|
115 | 115 | |
|
116 | 116 | def get_tail(self, n=10, raw=True, output=False, include_latest=False): |
|
117 | 117 | raise NotImplementedError |
|
118 | 118 | |
|
119 | 119 | def search(self, pattern="*", raw=True, search_raw=True, |
|
120 | 120 | output=False, n=None, unique=False): |
|
121 | 121 | raise NotImplementedError |
|
122 | 122 | |
|
123 | 123 | def get_range(self, session, start=1, stop=None, raw=True,output=False): |
|
124 | 124 | raise NotImplementedError |
|
125 | 125 | |
|
126 | 126 | def get_range_by_str(self, rangestr, raw=True, output=False): |
|
127 | 127 | raise NotImplementedError |
|
128 | 128 | |
|
129 | 129 | |
|
130 | 130 | class HistoryAccessor(HistoryAccessorBase): |
|
131 | 131 | """Access the history database without adding to it. |
|
132 | 132 | |
|
133 | 133 | This is intended for use by standalone history tools. IPython shells use |
|
134 | 134 | HistoryManager, below, which is a subclass of this.""" |
|
135 | 135 | |
|
136 | 136 | # counter for init_db retries, so we don't keep trying over and over |
|
137 | 137 | _corrupt_db_counter = 0 |
|
138 | 138 | # after two failures, fallback on :memory: |
|
139 | 139 | _corrupt_db_limit = 2 |
|
140 | 140 | |
|
141 | 141 | # String holding the path to the history file |
|
142 | 142 | hist_file = Union( |
|
143 | 143 | [Instance(Path), Unicode()], |
|
144 | 144 | help="""Path to file to use for SQLite history database. |
|
145 | 145 | |
|
146 | 146 | By default, IPython will put the history database in the IPython |
|
147 | 147 | profile directory. If you would rather share one history among |
|
148 | 148 | profiles, you can set this value in each, so that they are consistent. |
|
149 | 149 | |
|
150 | 150 | Due to an issue with fcntl, SQLite is known to misbehave on some NFS |
|
151 | 151 | mounts. If you see IPython hanging, try setting this to something on a |
|
152 | 152 | local disk, e.g:: |
|
153 | 153 | |
|
154 | 154 | ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite |
|
155 | 155 | |
|
156 | 156 | you can also use the specific value `:memory:` (including the colon |
|
157 | 157 | at both end but not the back ticks), to avoid creating an history file. |
|
158 | 158 | |
|
159 | 159 | """, |
|
160 | 160 | ).tag(config=True) |
|
161 | 161 | |
|
162 | 162 | enabled = Bool(True, |
|
163 | 163 | help="""enable the SQLite history |
|
164 | 164 | |
|
165 | 165 | set enabled=False to disable the SQLite history, |
|
166 | 166 | in which case there will be no stored history, no SQLite connection, |
|
167 | 167 | and no background saving thread. This may be necessary in some |
|
168 | 168 | threaded environments where IPython is embedded. |
|
169 | 169 | """ |
|
170 | 170 | ).tag(config=True) |
|
171 | 171 | |
|
172 | 172 | connection_options = Dict( |
|
173 | 173 | help="""Options for configuring the SQLite connection |
|
174 | 174 | |
|
175 | 175 | These options are passed as keyword args to sqlite3.connect |
|
176 | 176 | when establishing database connections. |
|
177 | 177 | """ |
|
178 | 178 | ).tag(config=True) |
|
179 | 179 | |
|
180 | 180 | # The SQLite database |
|
181 | 181 | db = Any() |
|
182 | 182 | @observe('db') |
|
183 | 183 | def _db_changed(self, change): |
|
184 | 184 | """validate the db, since it can be an Instance of two different types""" |
|
185 | 185 | new = change['new'] |
|
186 | 186 | connection_types = (DummyDB, sqlite3.Connection) |
|
187 | 187 | if not isinstance(new, connection_types): |
|
188 | 188 | msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \ |
|
189 | 189 | (self.__class__.__name__, new) |
|
190 | 190 | raise TraitError(msg) |
|
191 | 191 | |
|
192 | 192 | def __init__(self, profile="default", hist_file="", **traits): |
|
193 | 193 | """Create a new history accessor. |
|
194 | 194 | |
|
195 | 195 | Parameters |
|
196 | 196 | ---------- |
|
197 | 197 | profile : str |
|
198 | 198 | The name of the profile from which to open history. |
|
199 | 199 | hist_file : str |
|
200 | 200 | Path to an SQLite history database stored by IPython. If specified, |
|
201 | 201 | hist_file overrides profile. |
|
202 | 202 | config : :class:`~traitlets.config.loader.Config` |
|
203 | 203 | Config object. hist_file can also be set through this. |
|
204 | 204 | """ |
|
205 | 205 | # We need a pointer back to the shell for various tasks. |
|
206 | 206 | super(HistoryAccessor, self).__init__(**traits) |
|
207 | 207 | # defer setting hist_file from kwarg until after init, |
|
208 | 208 | # otherwise the default kwarg value would clobber any value |
|
209 | 209 | # set by config |
|
210 | 210 | if hist_file: |
|
211 | 211 | self.hist_file = hist_file |
|
212 | 212 | |
|
213 | 213 | try: |
|
214 | 214 | self.hist_file |
|
215 | 215 | except TraitError: |
|
216 | 216 | # No one has set the hist_file, yet. |
|
217 | 217 | self.hist_file = self._get_hist_file_name(profile) |
|
218 | 218 | |
|
219 | 219 | self.init_db() |
|
220 | 220 | |
|
221 | 221 | def _get_hist_file_name(self, profile='default'): |
|
222 | 222 | """Find the history file for the given profile name. |
|
223 | 223 | |
|
224 | 224 | This is overridden by the HistoryManager subclass, to use the shell's |
|
225 | 225 | active profile. |
|
226 | 226 | |
|
227 | 227 | Parameters |
|
228 | 228 | ---------- |
|
229 | 229 | profile : str |
|
230 | 230 | The name of a profile which has a history file. |
|
231 | 231 | """ |
|
232 | 232 | return Path(locate_profile(profile)) / "history.sqlite" |
|
233 | 233 | |
|
234 | 234 | @catch_corrupt_db |
|
235 | 235 | def init_db(self): |
|
236 | 236 | """Connect to the database, and create tables if necessary.""" |
|
237 | 237 | if not self.enabled: |
|
238 | 238 | self.db = DummyDB() |
|
239 | 239 | return |
|
240 | 240 | |
|
241 | 241 | # use detect_types so that timestamps return datetime objects |
|
242 | 242 | kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) |
|
243 | 243 | kwargs.update(self.connection_options) |
|
244 | 244 | self.db = sqlite3.connect(str(self.hist_file), **kwargs) |
|
245 | 245 | self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer |
|
246 | 246 | primary key autoincrement, start timestamp, |
|
247 | 247 | end timestamp, num_cmds integer, remark text)""") |
|
248 | 248 | self.db.execute("""CREATE TABLE IF NOT EXISTS history |
|
249 | 249 | (session integer, line integer, source text, source_raw text, |
|
250 | 250 | PRIMARY KEY (session, line))""") |
|
251 | 251 | # Output history is optional, but ensure the table's there so it can be |
|
252 | 252 | # enabled later. |
|
253 | 253 | self.db.execute("""CREATE TABLE IF NOT EXISTS output_history |
|
254 | 254 | (session integer, line integer, output text, |
|
255 | 255 | PRIMARY KEY (session, line))""") |
|
256 | 256 | self.db.commit() |
|
257 | 257 | # success! reset corrupt db count |
|
258 | 258 | self._corrupt_db_counter = 0 |
|
259 | 259 | |
|
260 | 260 | def writeout_cache(self): |
|
261 | 261 | """Overridden by HistoryManager to dump the cache before certain |
|
262 | 262 | database lookups.""" |
|
263 | 263 | pass |
|
264 | 264 | |
|
265 | 265 | ## ------------------------------- |
|
266 | 266 | ## Methods for retrieving history: |
|
267 | 267 | ## ------------------------------- |
|
268 | 268 | def _run_sql(self, sql, params, raw=True, output=False): |
|
269 | 269 | """Prepares and runs an SQL query for the history database. |
|
270 | 270 | |
|
271 | 271 | Parameters |
|
272 | 272 | ---------- |
|
273 | 273 | sql : str |
|
274 | 274 | Any filtering expressions to go after SELECT ... FROM ... |
|
275 | 275 | params : tuple |
|
276 | 276 | Parameters passed to the SQL query (to replace "?") |
|
277 | 277 | raw, output : bool |
|
278 | 278 | See :meth:`get_range` |
|
279 | 279 | |
|
280 | 280 | Returns |
|
281 | 281 | ------- |
|
282 | 282 | Tuples as :meth:`get_range` |
|
283 | 283 | """ |
|
284 | 284 | toget = 'source_raw' if raw else 'source' |
|
285 | 285 | sqlfrom = "history" |
|
286 | 286 | if output: |
|
287 | 287 | sqlfrom = "history LEFT JOIN output_history USING (session, line)" |
|
288 | 288 | toget = "history.%s, output_history.output" % toget |
|
289 | 289 | cur = self.db.execute("SELECT session, line, %s FROM %s " %\ |
|
290 | 290 | (toget, sqlfrom) + sql, params) |
|
291 | 291 | if output: # Regroup into 3-tuples, and parse JSON |
|
292 | 292 | return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur) |
|
293 | 293 | return cur |
|
294 | 294 | |
|
295 | 295 | @only_when_enabled |
|
296 | 296 | @catch_corrupt_db |
|
297 | 297 | def get_session_info(self, session): |
|
298 | 298 | """Get info about a session. |
|
299 | 299 | |
|
300 | 300 | Parameters |
|
301 | 301 | ---------- |
|
302 | 302 | |
|
303 | 303 | session : int |
|
304 | 304 | Session number to retrieve. |
|
305 | 305 | |
|
306 | 306 | Returns |
|
307 | 307 | ------- |
|
308 | 308 | |
|
309 | 309 | session_id : int |
|
310 | 310 | Session ID number |
|
311 | 311 | start : datetime |
|
312 | 312 | Timestamp for the start of the session. |
|
313 | 313 | end : datetime |
|
314 | 314 | Timestamp for the end of the session, or None if IPython crashed. |
|
315 | 315 | num_cmds : int |
|
316 | 316 | Number of commands run, or None if IPython crashed. |
|
317 | 317 | remark : unicode |
|
318 | 318 | A manually set description. |
|
319 | 319 | """ |
|
320 | 320 | query = "SELECT * from sessions where session == ?" |
|
321 | 321 | return self.db.execute(query, (session,)).fetchone() |
|
322 | 322 | |
|
323 | 323 | @catch_corrupt_db |
|
324 | 324 | def get_last_session_id(self): |
|
325 | 325 | """Get the last session ID currently in the database. |
|
326 | 326 | |
|
327 | 327 | Within IPython, this should be the same as the value stored in |
|
328 | 328 | :attr:`HistoryManager.session_number`. |
|
329 | 329 | """ |
|
330 | 330 | for record in self.get_tail(n=1, include_latest=True): |
|
331 | 331 | return record[0] |
|
332 | 332 | |
|
333 | 333 | @catch_corrupt_db |
|
334 | 334 | def get_tail(self, n=10, raw=True, output=False, include_latest=False): |
|
335 | 335 | """Get the last n lines from the history database. |
|
336 | 336 | |
|
337 | 337 | Parameters |
|
338 | 338 | ---------- |
|
339 | 339 | n : int |
|
340 | 340 | The number of lines to get |
|
341 | 341 | raw, output : bool |
|
342 | 342 | See :meth:`get_range` |
|
343 | 343 | include_latest : bool |
|
344 | 344 | If False (default), n+1 lines are fetched, and the latest one |
|
345 | 345 | is discarded. This is intended to be used where the function |
|
346 | 346 | is called by a user command, which it should not return. |
|
347 | 347 | |
|
348 | 348 | Returns |
|
349 | 349 | ------- |
|
350 | 350 | Tuples as :meth:`get_range` |
|
351 | 351 | """ |
|
352 | 352 | self.writeout_cache() |
|
353 | 353 | if not include_latest: |
|
354 | 354 | n += 1 |
|
355 | 355 | cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", |
|
356 | 356 | (n,), raw=raw, output=output) |
|
357 | 357 | if not include_latest: |
|
358 | 358 | return reversed(list(cur)[1:]) |
|
359 | 359 | return reversed(list(cur)) |
|
360 | 360 | |
|
361 | 361 | @catch_corrupt_db |
|
362 | 362 | def search(self, pattern="*", raw=True, search_raw=True, |
|
363 | 363 | output=False, n=None, unique=False): |
|
364 | 364 | """Search the database using unix glob-style matching (wildcards |
|
365 | 365 | * and ?). |
|
366 | 366 | |
|
367 | 367 | Parameters |
|
368 | 368 | ---------- |
|
369 | 369 | pattern : str |
|
370 | 370 | The wildcarded pattern to match when searching |
|
371 | 371 | search_raw : bool |
|
372 | 372 | If True, search the raw input, otherwise, the parsed input |
|
373 | 373 | raw, output : bool |
|
374 | 374 | See :meth:`get_range` |
|
375 | 375 | n : None or int |
|
376 | 376 | If an integer is given, it defines the limit of |
|
377 | 377 | returned entries. |
|
378 | 378 | unique : bool |
|
379 | 379 | When it is true, return only unique entries. |
|
380 | 380 | |
|
381 | 381 | Returns |
|
382 | 382 | ------- |
|
383 | 383 | Tuples as :meth:`get_range` |
|
384 | 384 | """ |
|
385 | 385 | tosearch = "source_raw" if search_raw else "source" |
|
386 | 386 | if output: |
|
387 | 387 | tosearch = "history." + tosearch |
|
388 | 388 | self.writeout_cache() |
|
389 | 389 | sqlform = "WHERE %s GLOB ?" % tosearch |
|
390 | 390 | params = (pattern,) |
|
391 | 391 | if unique: |
|
392 | 392 | sqlform += ' GROUP BY {0}'.format(tosearch) |
|
393 | 393 | if n is not None: |
|
394 | 394 | sqlform += " ORDER BY session DESC, line DESC LIMIT ?" |
|
395 | 395 | params += (n,) |
|
396 | 396 | elif unique: |
|
397 | 397 | sqlform += " ORDER BY session, line" |
|
398 | 398 | cur = self._run_sql(sqlform, params, raw=raw, output=output) |
|
399 | 399 | if n is not None: |
|
400 | 400 | return reversed(list(cur)) |
|
401 | 401 | return cur |
|
402 | 402 | |
|
403 | 403 | @catch_corrupt_db |
|
404 | 404 | def get_range(self, session, start=1, stop=None, raw=True,output=False): |
|
405 | 405 | """Retrieve input by session. |
|
406 | 406 | |
|
407 | 407 | Parameters |
|
408 | 408 | ---------- |
|
409 | 409 | session : int |
|
410 | 410 | Session number to retrieve. |
|
411 | 411 | start : int |
|
412 | 412 | First line to retrieve. |
|
413 | 413 | stop : int |
|
414 | 414 | End of line range (excluded from output itself). If None, retrieve |
|
415 | 415 | to the end of the session. |
|
416 | 416 | raw : bool |
|
417 | 417 | If True, return untranslated input |
|
418 | 418 | output : bool |
|
419 | 419 | If True, attempt to include output. This will be 'real' Python |
|
420 | 420 | objects for the current session, or text reprs from previous |
|
421 | 421 | sessions if db_log_output was enabled at the time. Where no output |
|
422 | 422 | is found, None is used. |
|
423 | 423 | |
|
424 | 424 | Returns |
|
425 | 425 | ------- |
|
426 | 426 | entries |
|
427 | 427 | An iterator over the desired lines. Each line is a 3-tuple, either |
|
428 | 428 | (session, line, input) if output is False, or |
|
429 | 429 | (session, line, (input, output)) if output is True. |
|
430 | 430 | """ |
|
431 | 431 | if stop: |
|
432 | 432 | lineclause = "line >= ? AND line < ?" |
|
433 | 433 | params = (session, start, stop) |
|
434 | 434 | else: |
|
435 | 435 | lineclause = "line>=?" |
|
436 | 436 | params = (session, start) |
|
437 | 437 | |
|
438 | 438 | return self._run_sql("WHERE session==? AND %s" % lineclause, |
|
439 | 439 | params, raw=raw, output=output) |
|
440 | 440 | |
|
441 | 441 | def get_range_by_str(self, rangestr, raw=True, output=False): |
|
442 | 442 | """Get lines of history from a string of ranges, as used by magic |
|
443 | 443 | commands %hist, %save, %macro, etc. |
|
444 | 444 | |
|
445 | 445 | Parameters |
|
446 | 446 | ---------- |
|
447 | 447 | rangestr : str |
|
448 |
A string specifying ranges, e.g. "5 ~2/1-4". |
|
|
449 | :func:`magic_history` for full details. | |
|
448 | A string specifying ranges, e.g. "5 ~2/1-4". If empty string is used, | |
|
449 | this will return everything from current session's history. | |
|
450 | ||
|
451 | See the documentation of :func:`%history` for the full details. | |
|
452 | ||
|
450 | 453 | raw, output : bool |
|
451 | 454 | As :meth:`get_range` |
|
452 | 455 | |
|
453 | 456 | Returns |
|
454 | 457 | ------- |
|
455 | 458 | Tuples as :meth:`get_range` |
|
456 | 459 | """ |
|
457 | 460 | for sess, s, e in extract_hist_ranges(rangestr): |
|
458 | 461 | for line in self.get_range(sess, s, e, raw=raw, output=output): |
|
459 | 462 | yield line |
|
460 | 463 | |
|
461 | 464 | |
|
462 | 465 | class HistoryManager(HistoryAccessor): |
|
463 | 466 | """A class to organize all history-related functionality in one place. |
|
464 | 467 | """ |
|
465 | 468 | # Public interface |
|
466 | 469 | |
|
467 | 470 | # An instance of the IPython shell we are attached to |
|
468 | 471 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', |
|
469 | 472 | allow_none=True) |
|
470 | 473 | # Lists to hold processed and raw history. These start with a blank entry |
|
471 | 474 | # so that we can index them starting from 1 |
|
472 | 475 | input_hist_parsed = List([""]) |
|
473 | 476 | input_hist_raw = List([""]) |
|
474 | 477 | # A list of directories visited during session |
|
475 | 478 | dir_hist = List() |
|
476 | 479 | @default('dir_hist') |
|
477 | 480 | def _dir_hist_default(self): |
|
478 | 481 | try: |
|
479 | 482 | return [Path.cwd()] |
|
480 | 483 | except OSError: |
|
481 | 484 | return [] |
|
482 | 485 | |
|
483 | 486 | # A dict of output history, keyed with ints from the shell's |
|
484 | 487 | # execution count. |
|
485 | 488 | output_hist = Dict() |
|
486 | 489 | # The text/plain repr of outputs. |
|
487 | 490 | output_hist_reprs = Dict() |
|
488 | 491 | |
|
489 | 492 | # The number of the current session in the history database |
|
490 | 493 | session_number = Integer() |
|
491 | 494 | |
|
492 | 495 | db_log_output = Bool(False, |
|
493 | 496 | help="Should the history database include output? (default: no)" |
|
494 | 497 | ).tag(config=True) |
|
495 | 498 | db_cache_size = Integer(0, |
|
496 | 499 | help="Write to database every x commands (higher values save disk access & power).\n" |
|
497 | 500 | "Values of 1 or less effectively disable caching." |
|
498 | 501 | ).tag(config=True) |
|
499 | 502 | # The input and output caches |
|
500 | 503 | db_input_cache = List() |
|
501 | 504 | db_output_cache = List() |
|
502 | 505 | |
|
503 | 506 | # History saving in separate thread |
|
504 | 507 | save_thread = Instance('IPython.core.history.HistorySavingThread', |
|
505 | 508 | allow_none=True) |
|
506 | 509 | save_flag = Instance(threading.Event, allow_none=True) |
|
507 | 510 | |
|
508 | 511 | # Private interface |
|
509 | 512 | # Variables used to store the three last inputs from the user. On each new |
|
510 | 513 | # history update, we populate the user's namespace with these, shifted as |
|
511 | 514 | # necessary. |
|
512 | 515 | _i00 = Unicode(u'') |
|
513 | 516 | _i = Unicode(u'') |
|
514 | 517 | _ii = Unicode(u'') |
|
515 | 518 | _iii = Unicode(u'') |
|
516 | 519 | |
|
517 | 520 | # A regex matching all forms of the exit command, so that we don't store |
|
518 | 521 | # them in the history (it's annoying to rewind the first entry and land on |
|
519 | 522 | # an exit call). |
|
520 | 523 | _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$") |
|
521 | 524 | |
|
522 | 525 | def __init__(self, shell=None, config=None, **traits): |
|
523 | 526 | """Create a new history manager associated with a shell instance. |
|
524 | 527 | """ |
|
525 | 528 | # We need a pointer back to the shell for various tasks. |
|
526 | 529 | super(HistoryManager, self).__init__(shell=shell, config=config, |
|
527 | 530 | **traits) |
|
528 | 531 | self.save_flag = threading.Event() |
|
529 | 532 | self.db_input_cache_lock = threading.Lock() |
|
530 | 533 | self.db_output_cache_lock = threading.Lock() |
|
531 | 534 | |
|
532 | 535 | try: |
|
533 | 536 | self.new_session() |
|
534 | 537 | except sqlite3.OperationalError: |
|
535 | 538 | self.log.error("Failed to create history session in %s. History will not be saved.", |
|
536 | 539 | self.hist_file, exc_info=True) |
|
537 | 540 | self.hist_file = ':memory:' |
|
538 | 541 | |
|
539 | 542 | if self.enabled and self.hist_file != ':memory:': |
|
540 | 543 | self.save_thread = HistorySavingThread(self) |
|
541 | 544 | self.save_thread.start() |
|
542 | 545 | |
|
543 | 546 | def _get_hist_file_name(self, profile=None): |
|
544 | 547 | """Get default history file name based on the Shell's profile. |
|
545 | 548 | |
|
546 | 549 | The profile parameter is ignored, but must exist for compatibility with |
|
547 | 550 | the parent class.""" |
|
548 | 551 | profile_dir = self.shell.profile_dir.location |
|
549 | 552 | return Path(profile_dir) / "history.sqlite" |
|
550 | 553 | |
|
551 | 554 | @only_when_enabled |
|
552 | 555 | def new_session(self, conn=None): |
|
553 | 556 | """Get a new session number.""" |
|
554 | 557 | if conn is None: |
|
555 | 558 | conn = self.db |
|
556 | 559 | |
|
557 | 560 | with conn: |
|
558 | 561 | cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL, |
|
559 | 562 | NULL, "") """, (datetime.datetime.now(),)) |
|
560 | 563 | self.session_number = cur.lastrowid |
|
561 | 564 | |
|
562 | 565 | def end_session(self): |
|
563 | 566 | """Close the database session, filling in the end time and line count.""" |
|
564 | 567 | self.writeout_cache() |
|
565 | 568 | with self.db: |
|
566 | 569 | self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE |
|
567 | 570 | session==?""", (datetime.datetime.now(), |
|
568 | 571 | len(self.input_hist_parsed)-1, self.session_number)) |
|
569 | 572 | self.session_number = 0 |
|
570 | 573 | |
|
571 | 574 | def name_session(self, name): |
|
572 | 575 | """Give the current session a name in the history database.""" |
|
573 | 576 | with self.db: |
|
574 | 577 | self.db.execute("UPDATE sessions SET remark=? WHERE session==?", |
|
575 | 578 | (name, self.session_number)) |
|
576 | 579 | |
|
577 | 580 | def reset(self, new_session=True): |
|
578 | 581 | """Clear the session history, releasing all object references, and |
|
579 | 582 | optionally open a new session.""" |
|
580 | 583 | self.output_hist.clear() |
|
581 | 584 | # The directory history can't be completely empty |
|
582 | 585 | self.dir_hist[:] = [Path.cwd()] |
|
583 | 586 | |
|
584 | 587 | if new_session: |
|
585 | 588 | if self.session_number: |
|
586 | 589 | self.end_session() |
|
587 | 590 | self.input_hist_parsed[:] = [""] |
|
588 | 591 | self.input_hist_raw[:] = [""] |
|
589 | 592 | self.new_session() |
|
590 | 593 | |
|
591 | 594 | # ------------------------------ |
|
592 | 595 | # Methods for retrieving history |
|
593 | 596 | # ------------------------------ |
|
594 | 597 | def get_session_info(self, session=0): |
|
595 | 598 | """Get info about a session. |
|
596 | 599 | |
|
597 | 600 | Parameters |
|
598 | 601 | ---------- |
|
599 | 602 | |
|
600 | 603 | session : int |
|
601 | 604 | Session number to retrieve. The current session is 0, and negative |
|
602 | 605 | numbers count back from current session, so -1 is the previous session. |
|
603 | 606 | |
|
604 | 607 | Returns |
|
605 | 608 | ------- |
|
606 | 609 | |
|
607 | 610 | session_id : int |
|
608 | 611 | Session ID number |
|
609 | 612 | start : datetime |
|
610 | 613 | Timestamp for the start of the session. |
|
611 | 614 | end : datetime |
|
612 | 615 | Timestamp for the end of the session, or None if IPython crashed. |
|
613 | 616 | num_cmds : int |
|
614 | 617 | Number of commands run, or None if IPython crashed. |
|
615 | 618 | remark : unicode |
|
616 | 619 | A manually set description. |
|
617 | 620 | """ |
|
618 | 621 | if session <= 0: |
|
619 | 622 | session += self.session_number |
|
620 | 623 | |
|
621 | 624 | return super(HistoryManager, self).get_session_info(session=session) |
|
622 | 625 | |
|
623 | 626 | def _get_range_session(self, start=1, stop=None, raw=True, output=False): |
|
624 | 627 | """Get input and output history from the current session. Called by |
|
625 | 628 | get_range, and takes similar parameters.""" |
|
626 | 629 | input_hist = self.input_hist_raw if raw else self.input_hist_parsed |
|
627 | 630 | |
|
628 | 631 | n = len(input_hist) |
|
629 | 632 | if start < 0: |
|
630 | 633 | start += n |
|
631 | 634 | if not stop or (stop > n): |
|
632 | 635 | stop = n |
|
633 | 636 | elif stop < 0: |
|
634 | 637 | stop += n |
|
635 | 638 | |
|
636 | 639 | for i in range(start, stop): |
|
637 | 640 | if output: |
|
638 | 641 | line = (input_hist[i], self.output_hist_reprs.get(i)) |
|
639 | 642 | else: |
|
640 | 643 | line = input_hist[i] |
|
641 | 644 | yield (0, i, line) |
|
642 | 645 | |
|
643 | 646 | def get_range(self, session=0, start=1, stop=None, raw=True,output=False): |
|
644 | 647 | """Retrieve input by session. |
|
645 | 648 | |
|
646 | 649 | Parameters |
|
647 | 650 | ---------- |
|
648 | 651 | session : int |
|
649 | 652 | Session number to retrieve. The current session is 0, and negative |
|
650 | 653 | numbers count back from current session, so -1 is previous session. |
|
651 | 654 | start : int |
|
652 | 655 | First line to retrieve. |
|
653 | 656 | stop : int |
|
654 | 657 | End of line range (excluded from output itself). If None, retrieve |
|
655 | 658 | to the end of the session. |
|
656 | 659 | raw : bool |
|
657 | 660 | If True, return untranslated input |
|
658 | 661 | output : bool |
|
659 | 662 | If True, attempt to include output. This will be 'real' Python |
|
660 | 663 | objects for the current session, or text reprs from previous |
|
661 | 664 | sessions if db_log_output was enabled at the time. Where no output |
|
662 | 665 | is found, None is used. |
|
663 | 666 | |
|
664 | 667 | Returns |
|
665 | 668 | ------- |
|
666 | 669 | entries |
|
667 | 670 | An iterator over the desired lines. Each line is a 3-tuple, either |
|
668 | 671 | (session, line, input) if output is False, or |
|
669 | 672 | (session, line, (input, output)) if output is True. |
|
670 | 673 | """ |
|
671 | 674 | if session <= 0: |
|
672 | 675 | session += self.session_number |
|
673 | 676 | if session==self.session_number: # Current session |
|
674 | 677 | return self._get_range_session(start, stop, raw, output) |
|
675 | 678 | return super(HistoryManager, self).get_range(session, start, stop, raw, |
|
676 | 679 | output) |
|
677 | 680 | |
|
678 | 681 | ## ---------------------------- |
|
679 | 682 | ## Methods for storing history: |
|
680 | 683 | ## ---------------------------- |
|
681 | 684 | def store_inputs(self, line_num, source, source_raw=None): |
|
682 | 685 | """Store source and raw input in history and create input cache |
|
683 | 686 | variables ``_i*``. |
|
684 | 687 | |
|
685 | 688 | Parameters |
|
686 | 689 | ---------- |
|
687 | 690 | line_num : int |
|
688 | 691 | The prompt number of this input. |
|
689 | 692 | |
|
690 | 693 | source : str |
|
691 | 694 | Python input. |
|
692 | 695 | |
|
693 | 696 | source_raw : str, optional |
|
694 | 697 | If given, this is the raw input without any IPython transformations |
|
695 | 698 | applied to it. If not given, ``source`` is used. |
|
696 | 699 | """ |
|
697 | 700 | if source_raw is None: |
|
698 | 701 | source_raw = source |
|
699 | 702 | source = source.rstrip('\n') |
|
700 | 703 | source_raw = source_raw.rstrip('\n') |
|
701 | 704 | |
|
702 | 705 | # do not store exit/quit commands |
|
703 | 706 | if self._exit_re.match(source_raw.strip()): |
|
704 | 707 | return |
|
705 | 708 | |
|
706 | 709 | self.input_hist_parsed.append(source) |
|
707 | 710 | self.input_hist_raw.append(source_raw) |
|
708 | 711 | |
|
709 | 712 | with self.db_input_cache_lock: |
|
710 | 713 | self.db_input_cache.append((line_num, source, source_raw)) |
|
711 | 714 | # Trigger to flush cache and write to DB. |
|
712 | 715 | if len(self.db_input_cache) >= self.db_cache_size: |
|
713 | 716 | self.save_flag.set() |
|
714 | 717 | |
|
715 | 718 | # update the auto _i variables |
|
716 | 719 | self._iii = self._ii |
|
717 | 720 | self._ii = self._i |
|
718 | 721 | self._i = self._i00 |
|
719 | 722 | self._i00 = source_raw |
|
720 | 723 | |
|
721 | 724 | # hackish access to user namespace to create _i1,_i2... dynamically |
|
722 | 725 | new_i = '_i%s' % line_num |
|
723 | 726 | to_main = {'_i': self._i, |
|
724 | 727 | '_ii': self._ii, |
|
725 | 728 | '_iii': self._iii, |
|
726 | 729 | new_i : self._i00 } |
|
727 | 730 | |
|
728 | 731 | if self.shell is not None: |
|
729 | 732 | self.shell.push(to_main, interactive=False) |
|
730 | 733 | |
|
731 | 734 | def store_output(self, line_num): |
|
732 | 735 | """If database output logging is enabled, this saves all the |
|
733 | 736 | outputs from the indicated prompt number to the database. It's |
|
734 | 737 | called by run_cell after code has been executed. |
|
735 | 738 | |
|
736 | 739 | Parameters |
|
737 | 740 | ---------- |
|
738 | 741 | line_num : int |
|
739 | 742 | The line number from which to save outputs |
|
740 | 743 | """ |
|
741 | 744 | if (not self.db_log_output) or (line_num not in self.output_hist_reprs): |
|
742 | 745 | return |
|
743 | 746 | output = self.output_hist_reprs[line_num] |
|
744 | 747 | |
|
745 | 748 | with self.db_output_cache_lock: |
|
746 | 749 | self.db_output_cache.append((line_num, output)) |
|
747 | 750 | if self.db_cache_size <= 1: |
|
748 | 751 | self.save_flag.set() |
|
749 | 752 | |
|
750 | 753 | def _writeout_input_cache(self, conn): |
|
751 | 754 | with conn: |
|
752 | 755 | for line in self.db_input_cache: |
|
753 | 756 | conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)", |
|
754 | 757 | (self.session_number,)+line) |
|
755 | 758 | |
|
756 | 759 | def _writeout_output_cache(self, conn): |
|
757 | 760 | with conn: |
|
758 | 761 | for line in self.db_output_cache: |
|
759 | 762 | conn.execute("INSERT INTO output_history VALUES (?, ?, ?)", |
|
760 | 763 | (self.session_number,)+line) |
|
761 | 764 | |
|
762 | 765 | @only_when_enabled |
|
763 | 766 | def writeout_cache(self, conn=None): |
|
764 | 767 | """Write any entries in the cache to the database.""" |
|
765 | 768 | if conn is None: |
|
766 | 769 | conn = self.db |
|
767 | 770 | |
|
768 | 771 | with self.db_input_cache_lock: |
|
769 | 772 | try: |
|
770 | 773 | self._writeout_input_cache(conn) |
|
771 | 774 | except sqlite3.IntegrityError: |
|
772 | 775 | self.new_session(conn) |
|
773 | 776 | print("ERROR! Session/line number was not unique in", |
|
774 | 777 | "database. History logging moved to new session", |
|
775 | 778 | self.session_number) |
|
776 | 779 | try: |
|
777 | 780 | # Try writing to the new session. If this fails, don't |
|
778 | 781 | # recurse |
|
779 | 782 | self._writeout_input_cache(conn) |
|
780 | 783 | except sqlite3.IntegrityError: |
|
781 | 784 | pass |
|
782 | 785 | finally: |
|
783 | 786 | self.db_input_cache = [] |
|
784 | 787 | |
|
785 | 788 | with self.db_output_cache_lock: |
|
786 | 789 | try: |
|
787 | 790 | self._writeout_output_cache(conn) |
|
788 | 791 | except sqlite3.IntegrityError: |
|
789 | 792 | print("!! Session/line number for output was not unique", |
|
790 | 793 | "in database. Output will not be stored.") |
|
791 | 794 | finally: |
|
792 | 795 | self.db_output_cache = [] |
|
793 | 796 | |
|
794 | 797 | |
|
795 | 798 | class HistorySavingThread(threading.Thread): |
|
796 | 799 | """This thread takes care of writing history to the database, so that |
|
797 | 800 | the UI isn't held up while that happens. |
|
798 | 801 | |
|
799 | 802 | It waits for the HistoryManager's save_flag to be set, then writes out |
|
800 | 803 | the history cache. The main thread is responsible for setting the flag when |
|
801 | 804 | the cache size reaches a defined threshold.""" |
|
802 | 805 | daemon = True |
|
803 | 806 | stop_now = False |
|
804 | 807 | enabled = True |
|
805 | 808 | def __init__(self, history_manager): |
|
806 | 809 | super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread") |
|
807 | 810 | self.history_manager = history_manager |
|
808 | 811 | self.enabled = history_manager.enabled |
|
809 | 812 | atexit.register(self.stop) |
|
810 | 813 | |
|
811 | 814 | @only_when_enabled |
|
812 | 815 | def run(self): |
|
813 | 816 | # We need a separate db connection per thread: |
|
814 | 817 | try: |
|
815 | 818 | self.db = sqlite3.connect( |
|
816 | 819 | str(self.history_manager.hist_file), |
|
817 | 820 | **self.history_manager.connection_options |
|
818 | 821 | ) |
|
819 | 822 | while True: |
|
820 | 823 | self.history_manager.save_flag.wait() |
|
821 | 824 | if self.stop_now: |
|
822 | 825 | self.db.close() |
|
823 | 826 | return |
|
824 | 827 | self.history_manager.save_flag.clear() |
|
825 | 828 | self.history_manager.writeout_cache(self.db) |
|
826 | 829 | except Exception as e: |
|
827 | 830 | print(("The history saving thread hit an unexpected error (%s)." |
|
828 | 831 | "History will not be written to the database.") % repr(e)) |
|
829 | 832 | |
|
830 | 833 | def stop(self): |
|
831 | 834 | """This can be called from the main thread to safely stop this thread. |
|
832 | 835 | |
|
833 | 836 | Note that it does not attempt to write out remaining history before |
|
834 | 837 | exiting. That should be done by calling the HistoryManager's |
|
835 | 838 | end_session method.""" |
|
836 | 839 | self.stop_now = True |
|
837 | 840 | self.history_manager.save_flag.set() |
|
838 | 841 | self.join() |
|
839 | 842 | |
|
840 | 843 | |
|
841 | 844 | # To match, e.g. ~5/8-~2/3 |
|
842 | 845 | range_re = re.compile(r""" |
|
843 | 846 | ((?P<startsess>~?\d+)/)? |
|
844 | 847 | (?P<start>\d+)? |
|
845 | 848 | ((?P<sep>[\-:]) |
|
846 | 849 | ((?P<endsess>~?\d+)/)? |
|
847 | 850 | (?P<end>\d+))? |
|
848 | 851 | $""", re.VERBOSE) |
|
849 | 852 | |
|
850 | 853 | |
|
851 | 854 | def extract_hist_ranges(ranges_str): |
|
852 | 855 | """Turn a string of history ranges into 3-tuples of (session, start, stop). |
|
853 | 856 | |
|
857 | Empty string results in a `[(0, 1, None)]`, i.e. "everything from current | |
|
858 | session". | |
|
859 | ||
|
854 | 860 | Examples |
|
855 | 861 | -------- |
|
856 | 862 | >>> list(extract_hist_ranges("~8/5-~7/4 2")) |
|
857 | 863 | [(-8, 5, None), (-7, 1, 5), (0, 2, 3)] |
|
858 | 864 | """ |
|
865 | if ranges_str == "": | |
|
866 | yield (0, 1, None) # Everything from current session | |
|
867 | return | |
|
868 | ||
|
859 | 869 | for range_str in ranges_str.split(): |
|
860 | 870 | rmatch = range_re.match(range_str) |
|
861 | 871 | if not rmatch: |
|
862 | 872 | continue |
|
863 | 873 | start = rmatch.group("start") |
|
864 | 874 | if start: |
|
865 | 875 | start = int(start) |
|
866 | 876 | end = rmatch.group("end") |
|
867 | 877 | # If no end specified, get (a, a + 1) |
|
868 | 878 | end = int(end) if end else start + 1 |
|
869 | 879 | else: # start not specified |
|
870 | 880 | if not rmatch.group('startsess'): # no startsess |
|
871 | 881 | continue |
|
872 | 882 | start = 1 |
|
873 | 883 | end = None # provide the entire session hist |
|
874 | 884 | |
|
875 | 885 | if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3] |
|
876 | 886 | end += 1 |
|
877 | 887 | startsess = rmatch.group("startsess") or "0" |
|
878 | 888 | endsess = rmatch.group("endsess") or startsess |
|
879 | 889 | startsess = int(startsess.replace("~","-")) |
|
880 | 890 | endsess = int(endsess.replace("~","-")) |
|
881 | 891 | assert endsess >= startsess, "start session must be earlier than end session" |
|
882 | 892 | |
|
883 | 893 | if endsess == startsess: |
|
884 | 894 | yield (startsess, start, end) |
|
885 | 895 | continue |
|
886 | 896 | # Multiple sessions in one range: |
|
887 | 897 | yield (startsess, start, None) |
|
888 | 898 | for sess in range(startsess+1, endsess): |
|
889 | 899 | yield (sess, 1, None) |
|
890 | 900 | yield (endsess, 1, end) |
|
891 | 901 | |
|
892 | 902 | |
|
893 | 903 | def _format_lineno(session, line): |
|
894 | 904 | """Helper function to format line numbers properly.""" |
|
895 | 905 | if session == 0: |
|
896 | 906 | return str(line) |
|
897 | 907 | return "%s#%s" % (session, line) |
@@ -1,729 +1,799 b'' | |||
|
1 | 1 | """Input transformer machinery to support IPython special syntax. |
|
2 | 2 | |
|
3 | 3 | This includes the machinery to recognise and transform ``%magic`` commands, |
|
4 | 4 | ``!system`` commands, ``help?`` querying, prompt stripping, and so forth. |
|
5 | 5 | |
|
6 | 6 | Added: IPython 7.0. Replaces inputsplitter and inputtransformer which were |
|
7 | 7 | deprecated in 7.0. |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | # Copyright (c) IPython Development Team. |
|
11 | 11 | # Distributed under the terms of the Modified BSD License. |
|
12 | 12 | |
|
13 | from codeop import compile_command | |
|
13 | import ast | |
|
14 | import sys | |
|
15 | from codeop import CommandCompiler, Compile | |
|
14 | 16 | import re |
|
15 | 17 | import tokenize |
|
16 | 18 | from typing import List, Tuple, Optional, Any |
|
17 | 19 | import warnings |
|
18 | 20 | |
|
19 | 21 | _indent_re = re.compile(r'^[ \t]+') |
|
20 | 22 | |
|
21 | 23 | def leading_empty_lines(lines): |
|
22 | 24 | """Remove leading empty lines |
|
23 | 25 | |
|
24 | 26 | If the leading lines are empty or contain only whitespace, they will be |
|
25 | 27 | removed. |
|
26 | 28 | """ |
|
27 | 29 | if not lines: |
|
28 | 30 | return lines |
|
29 | 31 | for i, line in enumerate(lines): |
|
30 | 32 | if line and not line.isspace(): |
|
31 | 33 | return lines[i:] |
|
32 | 34 | return lines |
|
33 | 35 | |
|
34 | 36 | def leading_indent(lines): |
|
35 | 37 | """Remove leading indentation. |
|
36 | 38 | |
|
37 | 39 | If the first line starts with a spaces or tabs, the same whitespace will be |
|
38 | 40 | removed from each following line in the cell. |
|
39 | 41 | """ |
|
40 | 42 | if not lines: |
|
41 | 43 | return lines |
|
42 | 44 | m = _indent_re.match(lines[0]) |
|
43 | 45 | if not m: |
|
44 | 46 | return lines |
|
45 | 47 | space = m.group(0) |
|
46 | 48 | n = len(space) |
|
47 | 49 | return [l[n:] if l.startswith(space) else l |
|
48 | 50 | for l in lines] |
|
49 | 51 | |
|
50 | 52 | class PromptStripper: |
|
51 | 53 | """Remove matching input prompts from a block of input. |
|
52 | 54 | |
|
53 | 55 | Parameters |
|
54 | 56 | ---------- |
|
55 | 57 | prompt_re : regular expression |
|
56 | 58 | A regular expression matching any input prompt (including continuation, |
|
57 | 59 | e.g. ``...``) |
|
58 | 60 | initial_re : regular expression, optional |
|
59 | 61 | A regular expression matching only the initial prompt, but not continuation. |
|
60 | 62 | If no initial expression is given, prompt_re will be used everywhere. |
|
61 | 63 | Used mainly for plain Python prompts (``>>>``), where the continuation prompt |
|
62 | 64 | ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. |
|
63 | 65 | |
|
64 | 66 | Notes |
|
65 | 67 | ----- |
|
66 | 68 | |
|
67 | 69 | If initial_re and prompt_re differ, |
|
68 | 70 | only initial_re will be tested against the first line. |
|
69 | 71 | If any prompt is found on the first two lines, |
|
70 | 72 | prompts will be stripped from the rest of the block. |
|
71 | 73 | """ |
|
72 | 74 | def __init__(self, prompt_re, initial_re=None): |
|
73 | 75 | self.prompt_re = prompt_re |
|
74 | 76 | self.initial_re = initial_re or prompt_re |
|
75 | 77 | |
|
76 | 78 | def _strip(self, lines): |
|
77 | 79 | return [self.prompt_re.sub('', l, count=1) for l in lines] |
|
78 | 80 | |
|
79 | 81 | def __call__(self, lines): |
|
80 | 82 | if not lines: |
|
81 | 83 | return lines |
|
82 | 84 | if self.initial_re.match(lines[0]) or \ |
|
83 | 85 | (len(lines) > 1 and self.prompt_re.match(lines[1])): |
|
84 | 86 | return self._strip(lines) |
|
85 | 87 | return lines |
|
86 | 88 | |
|
87 | 89 | classic_prompt = PromptStripper( |
|
88 | 90 | prompt_re=re.compile(r'^(>>>|\.\.\.)( |$)'), |
|
89 | 91 | initial_re=re.compile(r'^>>>( |$)') |
|
90 | 92 | ) |
|
91 | 93 | |
|
92 | ipython_prompt = PromptStripper(re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')) | |
|
94 | ipython_prompt = PromptStripper( | |
|
95 | re.compile( | |
|
96 | r""" | |
|
97 | ^( # Match from the beginning of a line, either: | |
|
98 | ||
|
99 | # 1. First-line prompt: | |
|
100 | ((\[nav\]|\[ins\])?\ )? # Vi editing mode prompt, if it's there | |
|
101 | In\ # The 'In' of the prompt, with a space | |
|
102 | \[\d+\]: # Command index, as displayed in the prompt | |
|
103 | \ # With a mandatory trailing space | |
|
104 | ||
|
105 | | # ... or ... | |
|
106 | ||
|
107 | # 2. The three dots of the multiline prompt | |
|
108 | \s* # All leading whitespace characters | |
|
109 | \.{3,}: # The three (or more) dots | |
|
110 | \ ? # With an optional trailing space | |
|
111 | ||
|
112 | ) | |
|
113 | """, | |
|
114 | re.VERBOSE, | |
|
115 | ) | |
|
116 | ) | |
|
117 | ||
|
93 | 118 | |
|
94 | 119 | def cell_magic(lines): |
|
95 | 120 | if not lines or not lines[0].startswith('%%'): |
|
96 | 121 | return lines |
|
97 | 122 | if re.match(r'%%\w+\?', lines[0]): |
|
98 | 123 | # This case will be handled by help_end |
|
99 | 124 | return lines |
|
100 | 125 | magic_name, _, first_line = lines[0][2:].rstrip().partition(' ') |
|
101 | 126 | body = ''.join(lines[1:]) |
|
102 | 127 | return ['get_ipython().run_cell_magic(%r, %r, %r)\n' |
|
103 | 128 | % (magic_name, first_line, body)] |
|
104 | 129 | |
|
105 | 130 | |
|
106 | 131 | def _find_assign_op(token_line) -> Optional[int]: |
|
107 | 132 | """Get the index of the first assignment in the line ('=' not inside brackets) |
|
108 | 133 | |
|
109 | 134 | Note: We don't try to support multiple special assignment (a = b = %foo) |
|
110 | 135 | """ |
|
111 | 136 | paren_level = 0 |
|
112 | 137 | for i, ti in enumerate(token_line): |
|
113 | 138 | s = ti.string |
|
114 | 139 | if s == '=' and paren_level == 0: |
|
115 | 140 | return i |
|
116 | 141 | if s in {'(','[','{'}: |
|
117 | 142 | paren_level += 1 |
|
118 | 143 | elif s in {')', ']', '}'}: |
|
119 | 144 | if paren_level > 0: |
|
120 | 145 | paren_level -= 1 |
|
121 | 146 | return None |
|
122 | 147 | |
|
123 | 148 | def find_end_of_continued_line(lines, start_line: int): |
|
124 | 149 | """Find the last line of a line explicitly extended using backslashes. |
|
125 | 150 | |
|
126 | 151 | Uses 0-indexed line numbers. |
|
127 | 152 | """ |
|
128 | 153 | end_line = start_line |
|
129 | 154 | while lines[end_line].endswith('\\\n'): |
|
130 | 155 | end_line += 1 |
|
131 | 156 | if end_line >= len(lines): |
|
132 | 157 | break |
|
133 | 158 | return end_line |
|
134 | 159 | |
|
135 | 160 | def assemble_continued_line(lines, start: Tuple[int, int], end_line: int): |
|
136 | 161 | r"""Assemble a single line from multiple continued line pieces |
|
137 | 162 | |
|
138 | 163 | Continued lines are lines ending in ``\``, and the line following the last |
|
139 | 164 | ``\`` in the block. |
|
140 | 165 | |
|
141 | 166 | For example, this code continues over multiple lines:: |
|
142 | 167 | |
|
143 | 168 | if (assign_ix is not None) \ |
|
144 | 169 | and (len(line) >= assign_ix + 2) \ |
|
145 | 170 | and (line[assign_ix+1].string == '%') \ |
|
146 | 171 | and (line[assign_ix+2].type == tokenize.NAME): |
|
147 | 172 | |
|
148 | 173 | This statement contains four continued line pieces. |
|
149 | 174 | Assembling these pieces into a single line would give:: |
|
150 | 175 | |
|
151 | 176 | if (assign_ix is not None) and (len(line) >= assign_ix + 2) and (line[... |
|
152 | 177 | |
|
153 | 178 | This uses 0-indexed line numbers. *start* is (lineno, colno). |
|
154 | 179 | |
|
155 | 180 | Used to allow ``%magic`` and ``!system`` commands to be continued over |
|
156 | 181 | multiple lines. |
|
157 | 182 | """ |
|
158 | 183 | parts = [lines[start[0]][start[1]:]] + lines[start[0]+1:end_line+1] |
|
159 | 184 | return ' '.join([p.rstrip()[:-1] for p in parts[:-1]] # Strip backslash+newline |
|
160 | 185 | + [parts[-1].rstrip()]) # Strip newline from last line |
|
161 | 186 | |
|
162 | 187 | class TokenTransformBase: |
|
163 | 188 | """Base class for transformations which examine tokens. |
|
164 | 189 | |
|
165 | 190 | Special syntax should not be transformed when it occurs inside strings or |
|
166 | 191 | comments. This is hard to reliably avoid with regexes. The solution is to |
|
167 | 192 | tokenise the code as Python, and recognise the special syntax in the tokens. |
|
168 | 193 | |
|
169 | 194 | IPython's special syntax is not valid Python syntax, so tokenising may go |
|
170 | 195 | wrong after the special syntax starts. These classes therefore find and |
|
171 | 196 | transform *one* instance of special syntax at a time into regular Python |
|
172 | 197 | syntax. After each transformation, tokens are regenerated to find the next |
|
173 | 198 | piece of special syntax. |
|
174 | 199 | |
|
175 | 200 | Subclasses need to implement one class method (find) |
|
176 | 201 | and one regular method (transform). |
|
177 | 202 | |
|
178 | 203 | The priority attribute can select which transformation to apply if multiple |
|
179 | 204 | transformers match in the same place. Lower numbers have higher priority. |
|
180 | 205 | This allows "%magic?" to be turned into a help call rather than a magic call. |
|
181 | 206 | """ |
|
182 | 207 | # Lower numbers -> higher priority (for matches in the same location) |
|
183 | 208 | priority = 10 |
|
184 | 209 | |
|
185 | 210 | def sortby(self): |
|
186 | 211 | return self.start_line, self.start_col, self.priority |
|
187 | 212 | |
|
188 | 213 | def __init__(self, start): |
|
189 | 214 | self.start_line = start[0] - 1 # Shift from 1-index to 0-index |
|
190 | 215 | self.start_col = start[1] |
|
191 | 216 | |
|
192 | 217 | @classmethod |
|
193 | 218 | def find(cls, tokens_by_line): |
|
194 | 219 | """Find one instance of special syntax in the provided tokens. |
|
195 | 220 | |
|
196 | 221 | Tokens are grouped into logical lines for convenience, |
|
197 | 222 | so it is easy to e.g. look at the first token of each line. |
|
198 | 223 | *tokens_by_line* is a list of lists of tokenize.TokenInfo objects. |
|
199 | 224 | |
|
200 | 225 | This should return an instance of its class, pointing to the start |
|
201 | 226 | position it has found, or None if it found no match. |
|
202 | 227 | """ |
|
203 | 228 | raise NotImplementedError |
|
204 | 229 | |
|
205 | 230 | def transform(self, lines: List[str]): |
|
206 | 231 | """Transform one instance of special syntax found by ``find()`` |
|
207 | 232 | |
|
208 | 233 | Takes a list of strings representing physical lines, |
|
209 | 234 | returns a similar list of transformed lines. |
|
210 | 235 | """ |
|
211 | 236 | raise NotImplementedError |
|
212 | 237 | |
|
213 | 238 | class MagicAssign(TokenTransformBase): |
|
214 | 239 | """Transformer for assignments from magics (a = %foo)""" |
|
215 | 240 | @classmethod |
|
216 | 241 | def find(cls, tokens_by_line): |
|
217 | 242 | """Find the first magic assignment (a = %foo) in the cell. |
|
218 | 243 | """ |
|
219 | 244 | for line in tokens_by_line: |
|
220 | 245 | assign_ix = _find_assign_op(line) |
|
221 | 246 | if (assign_ix is not None) \ |
|
222 | 247 | and (len(line) >= assign_ix + 2) \ |
|
223 | 248 | and (line[assign_ix+1].string == '%') \ |
|
224 | 249 | and (line[assign_ix+2].type == tokenize.NAME): |
|
225 | 250 | return cls(line[assign_ix+1].start) |
|
226 | 251 | |
|
227 | 252 | def transform(self, lines: List[str]): |
|
228 | 253 | """Transform a magic assignment found by the ``find()`` classmethod. |
|
229 | 254 | """ |
|
230 | 255 | start_line, start_col = self.start_line, self.start_col |
|
231 | 256 | lhs = lines[start_line][:start_col] |
|
232 | 257 | end_line = find_end_of_continued_line(lines, start_line) |
|
233 | 258 | rhs = assemble_continued_line(lines, (start_line, start_col), end_line) |
|
234 | 259 | assert rhs.startswith('%'), rhs |
|
235 | 260 | magic_name, _, args = rhs[1:].partition(' ') |
|
236 | 261 | |
|
237 | 262 | lines_before = lines[:start_line] |
|
238 | 263 | call = "get_ipython().run_line_magic({!r}, {!r})".format(magic_name, args) |
|
239 | 264 | new_line = lhs + call + '\n' |
|
240 | 265 | lines_after = lines[end_line+1:] |
|
241 | 266 | |
|
242 | 267 | return lines_before + [new_line] + lines_after |
|
243 | 268 | |
|
244 | 269 | |
|
245 | 270 | class SystemAssign(TokenTransformBase): |
|
246 | 271 | """Transformer for assignments from system commands (a = !foo)""" |
|
247 | 272 | @classmethod |
|
248 | 273 | def find(cls, tokens_by_line): |
|
249 | 274 | """Find the first system assignment (a = !foo) in the cell. |
|
250 | 275 | """ |
|
251 | 276 | for line in tokens_by_line: |
|
252 | 277 | assign_ix = _find_assign_op(line) |
|
253 | 278 | if (assign_ix is not None) \ |
|
254 | 279 | and not line[assign_ix].line.strip().startswith('=') \ |
|
255 | 280 | and (len(line) >= assign_ix + 2) \ |
|
256 | 281 | and (line[assign_ix + 1].type == tokenize.ERRORTOKEN): |
|
257 | 282 | ix = assign_ix + 1 |
|
258 | 283 | |
|
259 | 284 | while ix < len(line) and line[ix].type == tokenize.ERRORTOKEN: |
|
260 | 285 | if line[ix].string == '!': |
|
261 | 286 | return cls(line[ix].start) |
|
262 | 287 | elif not line[ix].string.isspace(): |
|
263 | 288 | break |
|
264 | 289 | ix += 1 |
|
265 | 290 | |
|
266 | 291 | def transform(self, lines: List[str]): |
|
267 | 292 | """Transform a system assignment found by the ``find()`` classmethod. |
|
268 | 293 | """ |
|
269 | 294 | start_line, start_col = self.start_line, self.start_col |
|
270 | 295 | |
|
271 | 296 | lhs = lines[start_line][:start_col] |
|
272 | 297 | end_line = find_end_of_continued_line(lines, start_line) |
|
273 | 298 | rhs = assemble_continued_line(lines, (start_line, start_col), end_line) |
|
274 | 299 | assert rhs.startswith('!'), rhs |
|
275 | 300 | cmd = rhs[1:] |
|
276 | 301 | |
|
277 | 302 | lines_before = lines[:start_line] |
|
278 | 303 | call = "get_ipython().getoutput({!r})".format(cmd) |
|
279 | 304 | new_line = lhs + call + '\n' |
|
280 | 305 | lines_after = lines[end_line + 1:] |
|
281 | 306 | |
|
282 | 307 | return lines_before + [new_line] + lines_after |
|
283 | 308 | |
|
284 | 309 | # The escape sequences that define the syntax transformations IPython will |
|
285 | 310 | # apply to user input. These can NOT be just changed here: many regular |
|
286 | 311 | # expressions and other parts of the code may use their hardcoded values, and |
|
287 | 312 | # for all intents and purposes they constitute the 'IPython syntax', so they |
|
288 | 313 | # should be considered fixed. |
|
289 | 314 | |
|
290 | 315 | ESC_SHELL = '!' # Send line to underlying system shell |
|
291 | 316 | ESC_SH_CAP = '!!' # Send line to system shell and capture output |
|
292 | 317 | ESC_HELP = '?' # Find information about object |
|
293 | 318 | ESC_HELP2 = '??' # Find extra-detailed information about object |
|
294 | 319 | ESC_MAGIC = '%' # Call magic function |
|
295 | 320 | ESC_MAGIC2 = '%%' # Call cell-magic function |
|
296 | 321 | ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call |
|
297 | 322 | ESC_QUOTE2 = ';' # Quote all args as a single string, call |
|
298 | 323 | ESC_PAREN = '/' # Call first argument with rest of line as arguments |
|
299 | 324 | |
|
300 | 325 | ESCAPE_SINGLES = {'!', '?', '%', ',', ';', '/'} |
|
301 | 326 | ESCAPE_DOUBLES = {'!!', '??'} # %% (cell magic) is handled separately |
|
302 | 327 | |
|
303 | 328 | def _make_help_call(target, esc, next_input=None): |
|
304 | 329 | """Prepares a pinfo(2)/psearch call from a target name and the escape |
|
305 | 330 | (i.e. ? or ??)""" |
|
306 | 331 | method = 'pinfo2' if esc == '??' \ |
|
307 | 332 | else 'psearch' if '*' in target \ |
|
308 | 333 | else 'pinfo' |
|
309 | 334 | arg = " ".join([method, target]) |
|
310 | 335 | #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) |
|
311 | 336 | t_magic_name, _, t_magic_arg_s = arg.partition(' ') |
|
312 | 337 | t_magic_name = t_magic_name.lstrip(ESC_MAGIC) |
|
313 | 338 | if next_input is None: |
|
314 | 339 | return 'get_ipython().run_line_magic(%r, %r)' % (t_magic_name, t_magic_arg_s) |
|
315 | 340 | else: |
|
316 | 341 | return 'get_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \ |
|
317 | 342 | (next_input, t_magic_name, t_magic_arg_s) |
|
318 | 343 | |
|
319 | 344 | def _tr_help(content): |
|
320 | 345 | """Translate lines escaped with: ? |
|
321 | 346 | |
|
322 | 347 | A naked help line should fire the intro help screen (shell.show_usage()) |
|
323 | 348 | """ |
|
324 | 349 | if not content: |
|
325 | 350 | return 'get_ipython().show_usage()' |
|
326 | 351 | |
|
327 | 352 | return _make_help_call(content, '?') |
|
328 | 353 | |
|
329 | 354 | def _tr_help2(content): |
|
330 | 355 | """Translate lines escaped with: ?? |
|
331 | 356 | |
|
332 | 357 | A naked help line should fire the intro help screen (shell.show_usage()) |
|
333 | 358 | """ |
|
334 | 359 | if not content: |
|
335 | 360 | return 'get_ipython().show_usage()' |
|
336 | 361 | |
|
337 | 362 | return _make_help_call(content, '??') |
|
338 | 363 | |
|
339 | 364 | def _tr_magic(content): |
|
340 | 365 | "Translate lines escaped with a percent sign: %" |
|
341 | 366 | name, _, args = content.partition(' ') |
|
342 | 367 | return 'get_ipython().run_line_magic(%r, %r)' % (name, args) |
|
343 | 368 | |
|
344 | 369 | def _tr_quote(content): |
|
345 | 370 | "Translate lines escaped with a comma: ," |
|
346 | 371 | name, _, args = content.partition(' ') |
|
347 | 372 | return '%s("%s")' % (name, '", "'.join(args.split()) ) |
|
348 | 373 | |
|
349 | 374 | def _tr_quote2(content): |
|
350 | 375 | "Translate lines escaped with a semicolon: ;" |
|
351 | 376 | name, _, args = content.partition(' ') |
|
352 | 377 | return '%s("%s")' % (name, args) |
|
353 | 378 | |
|
354 | 379 | def _tr_paren(content): |
|
355 | 380 | "Translate lines escaped with a slash: /" |
|
356 | 381 | name, _, args = content.partition(' ') |
|
357 | 382 | return '%s(%s)' % (name, ", ".join(args.split())) |
|
358 | 383 | |
|
359 | 384 | tr = { ESC_SHELL : 'get_ipython().system({!r})'.format, |
|
360 | 385 | ESC_SH_CAP : 'get_ipython().getoutput({!r})'.format, |
|
361 | 386 | ESC_HELP : _tr_help, |
|
362 | 387 | ESC_HELP2 : _tr_help2, |
|
363 | 388 | ESC_MAGIC : _tr_magic, |
|
364 | 389 | ESC_QUOTE : _tr_quote, |
|
365 | 390 | ESC_QUOTE2 : _tr_quote2, |
|
366 | 391 | ESC_PAREN : _tr_paren } |
|
367 | 392 | |
|
368 | 393 | class EscapedCommand(TokenTransformBase): |
|
369 | 394 | """Transformer for escaped commands like %foo, !foo, or /foo""" |
|
370 | 395 | @classmethod |
|
371 | 396 | def find(cls, tokens_by_line): |
|
372 | 397 | """Find the first escaped command (%foo, !foo, etc.) in the cell. |
|
373 | 398 | """ |
|
374 | 399 | for line in tokens_by_line: |
|
375 | 400 | if not line: |
|
376 | 401 | continue |
|
377 | 402 | ix = 0 |
|
378 | 403 | ll = len(line) |
|
379 | 404 | while ll > ix and line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: |
|
380 | 405 | ix += 1 |
|
381 | 406 | if ix >= ll: |
|
382 | 407 | continue |
|
383 | 408 | if line[ix].string in ESCAPE_SINGLES: |
|
384 | 409 | return cls(line[ix].start) |
|
385 | 410 | |
|
386 | 411 | def transform(self, lines): |
|
387 | 412 | """Transform an escaped line found by the ``find()`` classmethod. |
|
388 | 413 | """ |
|
389 | 414 | start_line, start_col = self.start_line, self.start_col |
|
390 | 415 | |
|
391 | 416 | indent = lines[start_line][:start_col] |
|
392 | 417 | end_line = find_end_of_continued_line(lines, start_line) |
|
393 | 418 | line = assemble_continued_line(lines, (start_line, start_col), end_line) |
|
394 | 419 | |
|
395 | 420 | if len(line) > 1 and line[:2] in ESCAPE_DOUBLES: |
|
396 | 421 | escape, content = line[:2], line[2:] |
|
397 | 422 | else: |
|
398 | 423 | escape, content = line[:1], line[1:] |
|
399 | 424 | |
|
400 | 425 | if escape in tr: |
|
401 | 426 | call = tr[escape](content) |
|
402 | 427 | else: |
|
403 | 428 | call = '' |
|
404 | 429 | |
|
405 | 430 | lines_before = lines[:start_line] |
|
406 | 431 | new_line = indent + call + '\n' |
|
407 | 432 | lines_after = lines[end_line + 1:] |
|
408 | 433 | |
|
409 | 434 | return lines_before + [new_line] + lines_after |
|
410 | 435 | |
|
411 | 436 | _help_end_re = re.compile(r"""(%{0,2} |
|
412 | 437 | (?!\d)[\w*]+ # Variable name |
|
413 | 438 | (\.(?!\d)[\w*]+)* # .etc.etc |
|
414 | 439 | ) |
|
415 | 440 | (\?\??)$ # ? or ?? |
|
416 | 441 | """, |
|
417 | 442 | re.VERBOSE) |
|
418 | 443 | |
|
419 | 444 | class HelpEnd(TokenTransformBase): |
|
420 | 445 | """Transformer for help syntax: obj? and obj??""" |
|
421 | 446 | # This needs to be higher priority (lower number) than EscapedCommand so |
|
422 | 447 | # that inspecting magics (%foo?) works. |
|
423 | 448 | priority = 5 |
|
424 | 449 | |
|
425 | 450 | def __init__(self, start, q_locn): |
|
426 | 451 | super().__init__(start) |
|
427 | 452 | self.q_line = q_locn[0] - 1 # Shift from 1-indexed to 0-indexed |
|
428 | 453 | self.q_col = q_locn[1] |
|
429 | 454 | |
|
430 | 455 | @classmethod |
|
431 | 456 | def find(cls, tokens_by_line): |
|
432 | 457 | """Find the first help command (foo?) in the cell. |
|
433 | 458 | """ |
|
434 | 459 | for line in tokens_by_line: |
|
435 | 460 | # Last token is NEWLINE; look at last but one |
|
436 | 461 | if len(line) > 2 and line[-2].string == '?': |
|
437 | 462 | # Find the first token that's not INDENT/DEDENT |
|
438 | 463 | ix = 0 |
|
439 | 464 | while line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: |
|
440 | 465 | ix += 1 |
|
441 | 466 | return cls(line[ix].start, line[-2].start) |
|
442 | 467 | |
|
443 | 468 | def transform(self, lines): |
|
444 | 469 | """Transform a help command found by the ``find()`` classmethod. |
|
445 | 470 | """ |
|
446 | 471 | piece = ''.join(lines[self.start_line:self.q_line+1]) |
|
447 | 472 | indent, content = piece[:self.start_col], piece[self.start_col:] |
|
448 | 473 | lines_before = lines[:self.start_line] |
|
449 | 474 | lines_after = lines[self.q_line + 1:] |
|
450 | 475 | |
|
451 | 476 | m = _help_end_re.search(content) |
|
452 | 477 | if not m: |
|
453 | 478 | raise SyntaxError(content) |
|
454 | 479 | assert m is not None, content |
|
455 | 480 | target = m.group(1) |
|
456 | 481 | esc = m.group(3) |
|
457 | 482 | |
|
458 | 483 | # If we're mid-command, put it back on the next prompt for the user. |
|
459 | 484 | next_input = None |
|
460 | 485 | if (not lines_before) and (not lines_after) \ |
|
461 | 486 | and content.strip() != m.group(0): |
|
462 | 487 | next_input = content.rstrip('?\n') |
|
463 | 488 | |
|
464 | 489 | call = _make_help_call(target, esc, next_input=next_input) |
|
465 | 490 | new_line = indent + call + '\n' |
|
466 | 491 | |
|
467 | 492 | return lines_before + [new_line] + lines_after |
|
468 | 493 | |
|
469 | 494 | def make_tokens_by_line(lines:List[str]): |
|
470 | 495 | """Tokenize a series of lines and group tokens by line. |
|
471 | 496 | |
|
472 | 497 | The tokens for a multiline Python string or expression are grouped as one |
|
473 | 498 | line. All lines except the last lines should keep their line ending ('\\n', |
|
474 | 499 | '\\r\\n') for this to properly work. Use `.splitlines(keeplineending=True)` |
|
475 | 500 | for example when passing block of text to this function. |
|
476 | 501 | |
|
477 | 502 | """ |
|
478 | 503 | # NL tokens are used inside multiline expressions, but also after blank |
|
479 | 504 | # lines or comments. This is intentional - see https://bugs.python.org/issue17061 |
|
480 | 505 | # We want to group the former case together but split the latter, so we |
|
481 | 506 | # track parentheses level, similar to the internals of tokenize. |
|
482 | 507 | |
|
483 | 508 | # reexported from token on 3.7+ |
|
484 | 509 | NEWLINE, NL = tokenize.NEWLINE, tokenize.NL # type: ignore |
|
485 | 510 | tokens_by_line:List[List[Any]] = [[]] |
|
486 | 511 | if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')): |
|
487 | 512 | warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified") |
|
488 | 513 | parenlev = 0 |
|
489 | 514 | try: |
|
490 | 515 | for token in tokenize.generate_tokens(iter(lines).__next__): |
|
491 | 516 | tokens_by_line[-1].append(token) |
|
492 | 517 | if (token.type == NEWLINE) \ |
|
493 | 518 | or ((token.type == NL) and (parenlev <= 0)): |
|
494 | 519 | tokens_by_line.append([]) |
|
495 | 520 | elif token.string in {'(', '[', '{'}: |
|
496 | 521 | parenlev += 1 |
|
497 | 522 | elif token.string in {')', ']', '}'}: |
|
498 | 523 | if parenlev > 0: |
|
499 | 524 | parenlev -= 1 |
|
500 | 525 | except tokenize.TokenError: |
|
501 | 526 | # Input ended in a multiline string or expression. That's OK for us. |
|
502 | 527 | pass |
|
503 | 528 | |
|
504 | 529 | |
|
505 | 530 | if not tokens_by_line[-1]: |
|
506 | 531 | tokens_by_line.pop() |
|
507 | 532 | |
|
508 | 533 | |
|
509 | 534 | return tokens_by_line |
|
510 | 535 | |
|
536 | ||
|
537 | def has_sunken_brackets(tokens: List[tokenize.TokenInfo]): | |
|
538 | """Check if the depth of brackets in the list of tokens drops below 0""" | |
|
539 | parenlev = 0 | |
|
540 | for token in tokens: | |
|
541 | if token.string in {"(", "[", "{"}: | |
|
542 | parenlev += 1 | |
|
543 | elif token.string in {")", "]", "}"}: | |
|
544 | parenlev -= 1 | |
|
545 | if parenlev < 0: | |
|
546 | return True | |
|
547 | return False | |
|
548 | ||
|
549 | ||
|
511 | 550 | def show_linewise_tokens(s: str): |
|
512 | 551 | """For investigation and debugging""" |
|
513 | 552 | if not s.endswith('\n'): |
|
514 | 553 | s += '\n' |
|
515 | 554 | lines = s.splitlines(keepends=True) |
|
516 | 555 | for line in make_tokens_by_line(lines): |
|
517 | 556 | print("Line -------") |
|
518 | 557 | for tokinfo in line: |
|
519 | 558 | print(" ", tokinfo) |
|
520 | 559 | |
|
521 | 560 | # Arbitrary limit to prevent getting stuck in infinite loops |
|
522 | 561 | TRANSFORM_LOOP_LIMIT = 500 |
|
523 | 562 | |
|
524 | 563 | class TransformerManager: |
|
525 | 564 | """Applies various transformations to a cell or code block. |
|
526 | 565 | |
|
527 | 566 | The key methods for external use are ``transform_cell()`` |
|
528 | 567 | and ``check_complete()``. |
|
529 | 568 | """ |
|
530 | 569 | def __init__(self): |
|
531 | 570 | self.cleanup_transforms = [ |
|
532 | 571 | leading_empty_lines, |
|
533 | 572 | leading_indent, |
|
534 | 573 | classic_prompt, |
|
535 | 574 | ipython_prompt, |
|
536 | 575 | ] |
|
537 | 576 | self.line_transforms = [ |
|
538 | 577 | cell_magic, |
|
539 | 578 | ] |
|
540 | 579 | self.token_transformers = [ |
|
541 | 580 | MagicAssign, |
|
542 | 581 | SystemAssign, |
|
543 | 582 | EscapedCommand, |
|
544 | 583 | HelpEnd, |
|
545 | 584 | ] |
|
546 | 585 | |
|
547 | 586 | def do_one_token_transform(self, lines): |
|
548 | 587 | """Find and run the transform earliest in the code. |
|
549 | 588 | |
|
550 | 589 | Returns (changed, lines). |
|
551 | 590 | |
|
552 | 591 | This method is called repeatedly until changed is False, indicating |
|
553 | 592 | that all available transformations are complete. |
|
554 | 593 | |
|
555 | 594 | The tokens following IPython special syntax might not be valid, so |
|
556 | 595 | the transformed code is retokenised every time to identify the next |
|
557 | 596 | piece of special syntax. Hopefully long code cells are mostly valid |
|
558 | 597 | Python, not using lots of IPython special syntax, so this shouldn't be |
|
559 | 598 | a performance issue. |
|
560 | 599 | """ |
|
561 | 600 | tokens_by_line = make_tokens_by_line(lines) |
|
562 | 601 | candidates = [] |
|
563 | 602 | for transformer_cls in self.token_transformers: |
|
564 | 603 | transformer = transformer_cls.find(tokens_by_line) |
|
565 | 604 | if transformer: |
|
566 | 605 | candidates.append(transformer) |
|
567 | 606 | |
|
568 | 607 | if not candidates: |
|
569 | 608 | # Nothing to transform |
|
570 | 609 | return False, lines |
|
571 | 610 | ordered_transformers = sorted(candidates, key=TokenTransformBase.sortby) |
|
572 | 611 | for transformer in ordered_transformers: |
|
573 | 612 | try: |
|
574 | 613 | return True, transformer.transform(lines) |
|
575 | 614 | except SyntaxError: |
|
576 | 615 | pass |
|
577 | 616 | return False, lines |
|
578 | 617 | |
|
579 | 618 | def do_token_transforms(self, lines): |
|
580 | 619 | for _ in range(TRANSFORM_LOOP_LIMIT): |
|
581 | 620 | changed, lines = self.do_one_token_transform(lines) |
|
582 | 621 | if not changed: |
|
583 | 622 | return lines |
|
584 | 623 | |
|
585 | 624 | raise RuntimeError("Input transformation still changing after " |
|
586 | 625 | "%d iterations. Aborting." % TRANSFORM_LOOP_LIMIT) |
|
587 | 626 | |
|
588 | 627 | def transform_cell(self, cell: str) -> str: |
|
589 | 628 | """Transforms a cell of input code""" |
|
590 | 629 | if not cell.endswith('\n'): |
|
591 | 630 | cell += '\n' # Ensure the cell has a trailing newline |
|
592 | 631 | lines = cell.splitlines(keepends=True) |
|
593 | 632 | for transform in self.cleanup_transforms + self.line_transforms: |
|
594 | 633 | lines = transform(lines) |
|
595 | 634 | |
|
596 | 635 | lines = self.do_token_transforms(lines) |
|
597 | 636 | return ''.join(lines) |
|
598 | 637 | |
|
599 | 638 | def check_complete(self, cell: str): |
|
600 | 639 | """Return whether a block of code is ready to execute, or should be continued |
|
601 | 640 | |
|
602 | 641 | Parameters |
|
603 | 642 | ---------- |
|
604 | 643 | source : string |
|
605 | 644 | Python input code, which can be multiline. |
|
606 | 645 | |
|
607 | 646 | Returns |
|
608 | 647 | ------- |
|
609 | 648 | status : str |
|
610 | 649 | One of 'complete', 'incomplete', or 'invalid' if source is not a |
|
611 | 650 | prefix of valid code. |
|
612 | 651 | indent_spaces : int or None |
|
613 | 652 | The number of spaces by which to indent the next line of code. If |
|
614 | 653 | status is not 'incomplete', this is None. |
|
615 | 654 | """ |
|
616 | 655 | # Remember if the lines ends in a new line. |
|
617 | 656 | ends_with_newline = False |
|
618 | 657 | for character in reversed(cell): |
|
619 | 658 | if character == '\n': |
|
620 | 659 | ends_with_newline = True |
|
621 | 660 | break |
|
622 | 661 | elif character.strip(): |
|
623 | 662 | break |
|
624 | 663 | else: |
|
625 | 664 | continue |
|
626 | 665 | |
|
627 | 666 | if not ends_with_newline: |
|
628 | 667 | # Append an newline for consistent tokenization |
|
629 | 668 | # See https://bugs.python.org/issue33899 |
|
630 | 669 | cell += '\n' |
|
631 | 670 | |
|
632 | 671 | lines = cell.splitlines(keepends=True) |
|
633 | 672 | |
|
634 | 673 | if not lines: |
|
635 | 674 | return 'complete', None |
|
636 | 675 | |
|
637 | 676 | if lines[-1].endswith('\\'): |
|
638 | 677 | # Explicit backslash continuation |
|
639 | 678 | return 'incomplete', find_last_indent(lines) |
|
640 | 679 | |
|
641 | 680 | try: |
|
642 | 681 | for transform in self.cleanup_transforms: |
|
643 | 682 | if not getattr(transform, 'has_side_effects', False): |
|
644 | 683 | lines = transform(lines) |
|
645 | 684 | except SyntaxError: |
|
646 | 685 | return 'invalid', None |
|
647 | 686 | |
|
648 | 687 | if lines[0].startswith('%%'): |
|
649 | 688 | # Special case for cell magics - completion marked by blank line |
|
650 | 689 | if lines[-1].strip(): |
|
651 | 690 | return 'incomplete', find_last_indent(lines) |
|
652 | 691 | else: |
|
653 | 692 | return 'complete', None |
|
654 | 693 | |
|
655 | 694 | try: |
|
656 | 695 | for transform in self.line_transforms: |
|
657 | 696 | if not getattr(transform, 'has_side_effects', False): |
|
658 | 697 | lines = transform(lines) |
|
659 | 698 | lines = self.do_token_transforms(lines) |
|
660 | 699 | except SyntaxError: |
|
661 | 700 | return 'invalid', None |
|
662 | 701 | |
|
663 | 702 | tokens_by_line = make_tokens_by_line(lines) |
|
664 | 703 | |
|
704 | # Bail if we got one line and there are more closing parentheses than | |
|
705 | # the opening ones | |
|
706 | if ( | |
|
707 | len(lines) == 1 | |
|
708 | and tokens_by_line | |
|
709 | and has_sunken_brackets(tokens_by_line[0]) | |
|
710 | ): | |
|
711 | return "invalid", None | |
|
712 | ||
|
665 | 713 | if not tokens_by_line: |
|
666 | 714 | return 'incomplete', find_last_indent(lines) |
|
667 | 715 | |
|
668 | 716 | if tokens_by_line[-1][-1].type != tokenize.ENDMARKER: |
|
669 | 717 | # We're in a multiline string or expression |
|
670 | 718 | return 'incomplete', find_last_indent(lines) |
|
671 | 719 | |
|
672 | 720 | newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER} # type: ignore |
|
673 | 721 | |
|
674 | 722 | # Pop the last line which only contains DEDENTs and ENDMARKER |
|
675 | 723 | last_token_line = None |
|
676 | 724 | if {t.type for t in tokens_by_line[-1]} in [ |
|
677 | 725 | {tokenize.DEDENT, tokenize.ENDMARKER}, |
|
678 | 726 | {tokenize.ENDMARKER} |
|
679 | 727 | ] and len(tokens_by_line) > 1: |
|
680 | 728 | last_token_line = tokens_by_line.pop() |
|
681 | 729 | |
|
682 | 730 | while tokens_by_line[-1] and tokens_by_line[-1][-1].type in newline_types: |
|
683 | 731 | tokens_by_line[-1].pop() |
|
684 | 732 | |
|
685 | 733 | if not tokens_by_line[-1]: |
|
686 | 734 | return 'incomplete', find_last_indent(lines) |
|
687 | 735 | |
|
688 | 736 | if tokens_by_line[-1][-1].string == ':': |
|
689 | 737 | # The last line starts a block (e.g. 'if foo:') |
|
690 | 738 | ix = 0 |
|
691 | 739 | while tokens_by_line[-1][ix].type in {tokenize.INDENT, tokenize.DEDENT}: |
|
692 | 740 | ix += 1 |
|
693 | 741 | |
|
694 | 742 | indent = tokens_by_line[-1][ix].start[1] |
|
695 | 743 | return 'incomplete', indent + 4 |
|
696 | 744 | |
|
697 | 745 | if tokens_by_line[-1][0].line.endswith('\\'): |
|
698 | 746 | return 'incomplete', None |
|
699 | 747 | |
|
700 | 748 | # At this point, our checks think the code is complete (or invalid). |
|
701 | 749 | # We'll use codeop.compile_command to check this with the real parser |
|
702 | 750 | try: |
|
703 | 751 | with warnings.catch_warnings(): |
|
704 | 752 | warnings.simplefilter('error', SyntaxWarning) |
|
705 | 753 | res = compile_command(''.join(lines), symbol='exec') |
|
706 | 754 | except (SyntaxError, OverflowError, ValueError, TypeError, |
|
707 | 755 | MemoryError, SyntaxWarning): |
|
708 | 756 | return 'invalid', None |
|
709 | 757 | else: |
|
710 | 758 | if res is None: |
|
711 | 759 | return 'incomplete', find_last_indent(lines) |
|
712 | 760 | |
|
713 | 761 | if last_token_line and last_token_line[0].type == tokenize.DEDENT: |
|
714 | 762 | if ends_with_newline: |
|
715 | 763 | return 'complete', None |
|
716 | 764 | return 'incomplete', find_last_indent(lines) |
|
717 | 765 | |
|
718 | 766 | # If there's a blank line at the end, assume we're ready to execute |
|
719 | 767 | if not lines[-1].strip(): |
|
720 | 768 | return 'complete', None |
|
721 | 769 | |
|
722 | 770 | return 'complete', None |
|
723 | 771 | |
|
724 | 772 | |
|
725 | 773 | def find_last_indent(lines): |
|
726 | 774 | m = _indent_re.match(lines[-1]) |
|
727 | 775 | if not m: |
|
728 | 776 | return 0 |
|
729 | 777 | return len(m.group(0).replace('\t', ' '*4)) |
|
778 | ||
|
779 | ||
|
780 | class MaybeAsyncCompile(Compile): | |
|
781 | def __init__(self, extra_flags=0): | |
|
782 | super().__init__() | |
|
783 | self.flags |= extra_flags | |
|
784 | ||
|
785 | def __call__(self, *args, **kwds): | |
|
786 | return compile(*args, **kwds) | |
|
787 | ||
|
788 | ||
|
789 | class MaybeAsyncCommandCompiler(CommandCompiler): | |
|
790 | def __init__(self, extra_flags=0): | |
|
791 | self.compiler = MaybeAsyncCompile(extra_flags=extra_flags) | |
|
792 | ||
|
793 | ||
|
794 | if (sys.version_info.major, sys.version_info.minor) >= (3, 8): | |
|
795 | _extra_flags = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT | |
|
796 | else: | |
|
797 | _extra_flags = ast.PyCF_ONLY_AST | |
|
798 | ||
|
799 | compile_command = MaybeAsyncCommandCompiler(extra_flags=_extra_flags) |
@@ -1,3824 +1,3879 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Main IPython class.""" |
|
3 | 3 | |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> |
|
6 | 6 | # Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> |
|
7 | 7 | # Copyright (C) 2008-2011 The IPython Development Team |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | 10 | # the file COPYING, distributed as part of this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | import abc |
|
15 | 15 | import ast |
|
16 | 16 | import atexit |
|
17 | 17 | import builtins as builtin_mod |
|
18 | 18 | import functools |
|
19 | 19 | import inspect |
|
20 | 20 | import os |
|
21 | 21 | import re |
|
22 | 22 | import runpy |
|
23 | 23 | import sys |
|
24 | 24 | import tempfile |
|
25 | 25 | import traceback |
|
26 | 26 | import types |
|
27 | 27 | import subprocess |
|
28 | 28 | import warnings |
|
29 | 29 | from io import open as io_open |
|
30 | 30 | |
|
31 | 31 | from pathlib import Path |
|
32 | 32 | from pickleshare import PickleShareDB |
|
33 | 33 | |
|
34 | 34 | from traitlets.config.configurable import SingletonConfigurable |
|
35 | 35 | from traitlets.utils.importstring import import_item |
|
36 | 36 | from IPython.core import oinspect |
|
37 | 37 | from IPython.core import magic |
|
38 | 38 | from IPython.core import page |
|
39 | 39 | from IPython.core import prefilter |
|
40 | 40 | from IPython.core import ultratb |
|
41 | 41 | from IPython.core.alias import Alias, AliasManager |
|
42 | 42 | from IPython.core.autocall import ExitAutocall |
|
43 | 43 | from IPython.core.builtin_trap import BuiltinTrap |
|
44 | 44 | from IPython.core.events import EventManager, available_events |
|
45 | 45 | from IPython.core.compilerop import CachingCompiler, check_linecache_ipython |
|
46 | 46 | from IPython.core.debugger import Pdb |
|
47 | 47 | from IPython.core.display_trap import DisplayTrap |
|
48 | 48 | from IPython.core.displayhook import DisplayHook |
|
49 | 49 | from IPython.core.displaypub import DisplayPublisher |
|
50 | 50 | from IPython.core.error import InputRejected, UsageError |
|
51 | 51 | from IPython.core.extensions import ExtensionManager |
|
52 | 52 | from IPython.core.formatters import DisplayFormatter |
|
53 | 53 | from IPython.core.history import HistoryManager |
|
54 | 54 | from IPython.core.inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 |
|
55 | 55 | from IPython.core.logger import Logger |
|
56 | 56 | from IPython.core.macro import Macro |
|
57 | 57 | from IPython.core.payload import PayloadManager |
|
58 | 58 | from IPython.core.prefilter import PrefilterManager |
|
59 | 59 | from IPython.core.profiledir import ProfileDir |
|
60 | 60 | from IPython.core.usage import default_banner |
|
61 | 61 | from IPython.display import display |
|
62 | 62 | from IPython.testing.skipdoctest import skip_doctest |
|
63 | 63 | from IPython.utils import PyColorize |
|
64 | 64 | from IPython.utils import io |
|
65 | 65 | from IPython.utils import py3compat |
|
66 | 66 | from IPython.utils import openpy |
|
67 | 67 | from IPython.utils.decorators import undoc |
|
68 | 68 | from IPython.utils.io import ask_yes_no |
|
69 | 69 | from IPython.utils.ipstruct import Struct |
|
70 | 70 | from IPython.paths import get_ipython_dir |
|
71 | 71 | from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists |
|
72 | 72 | from IPython.utils.process import system, getoutput |
|
73 | 73 | from IPython.utils.strdispatch import StrDispatch |
|
74 | 74 | from IPython.utils.syspathcontext import prepended_to_syspath |
|
75 | 75 | from IPython.utils.text import format_screen, LSString, SList, DollarFormatter |
|
76 | 76 | from IPython.utils.tempdir import TemporaryDirectory |
|
77 | 77 | from traitlets import ( |
|
78 | 78 | Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type, |
|
79 | 79 | observe, default, validate, Any |
|
80 | 80 | ) |
|
81 | 81 | from warnings import warn |
|
82 | 82 | from logging import error |
|
83 | 83 | import IPython.core.hooks |
|
84 | 84 | |
|
85 | 85 | from typing import List as ListType, Tuple, Optional |
|
86 | 86 | from ast import AST |
|
87 | 87 | |
|
88 | 88 | # NoOpContext is deprecated, but ipykernel imports it from here. |
|
89 | 89 | # See https://github.com/ipython/ipykernel/issues/157 |
|
90 | 90 | # (2016, let's try to remove than in IPython 8.0) |
|
91 | 91 | from IPython.utils.contexts import NoOpContext |
|
92 | 92 | |
|
93 | 93 | try: |
|
94 | 94 | import docrepr.sphinxify as sphx |
|
95 | 95 | |
|
96 | 96 | def sphinxify(doc): |
|
97 | 97 | with TemporaryDirectory() as dirname: |
|
98 | 98 | return { |
|
99 | 99 | 'text/html': sphx.sphinxify(doc, dirname), |
|
100 | 100 | 'text/plain': doc |
|
101 | 101 | } |
|
102 | 102 | except ImportError: |
|
103 | 103 | sphinxify = None |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | class ProvisionalWarning(DeprecationWarning): |
|
107 | 107 | """ |
|
108 | 108 | Warning class for unstable features |
|
109 | 109 | """ |
|
110 | 110 | pass |
|
111 | 111 | |
|
112 | 112 | if sys.version_info > (3,8): |
|
113 | 113 | from ast import Module |
|
114 | 114 | else : |
|
115 | 115 | # mock the new API, ignore second argument |
|
116 | 116 | # see https://github.com/ipython/ipython/issues/11590 |
|
117 | 117 | from ast import Module as OriginalModule |
|
118 | 118 | Module = lambda nodelist, type_ignores: OriginalModule(nodelist) |
|
119 | 119 | |
|
120 | 120 | if sys.version_info > (3,6): |
|
121 | 121 | _assign_nodes = (ast.AugAssign, ast.AnnAssign, ast.Assign) |
|
122 | 122 | _single_targets_nodes = (ast.AugAssign, ast.AnnAssign) |
|
123 | 123 | else: |
|
124 | 124 | _assign_nodes = (ast.AugAssign, ast.Assign ) |
|
125 | 125 | _single_targets_nodes = (ast.AugAssign, ) |
|
126 | 126 | |
|
127 | 127 | #----------------------------------------------------------------------------- |
|
128 | 128 | # Await Helpers |
|
129 | 129 | #----------------------------------------------------------------------------- |
|
130 | 130 | |
|
131 | 131 | def removed_co_newlocals(function:types.FunctionType) -> types.FunctionType: |
|
132 | 132 | """Return a function that do not create a new local scope. |
|
133 | 133 | |
|
134 | 134 | Given a function, create a clone of this function where the co_newlocal flag |
|
135 | 135 | has been removed, making this function code actually run in the sourounding |
|
136 | 136 | scope. |
|
137 | 137 | |
|
138 | 138 | We need this in order to run asynchronous code in user level namespace. |
|
139 | 139 | """ |
|
140 | 140 | from types import CodeType, FunctionType |
|
141 | 141 | CO_NEWLOCALS = 0x0002 |
|
142 | 142 | code = function.__code__ |
|
143 | 143 | new_co_flags = code.co_flags & ~CO_NEWLOCALS |
|
144 | 144 | if sys.version_info > (3, 8, 0, 'alpha', 3): |
|
145 | 145 | new_code = code.replace(co_flags=new_co_flags) |
|
146 | 146 | else: |
|
147 | 147 | new_code = CodeType( |
|
148 | 148 | code.co_argcount, |
|
149 | 149 | code.co_kwonlyargcount, |
|
150 | 150 | code.co_nlocals, |
|
151 | 151 | code.co_stacksize, |
|
152 | 152 | new_co_flags, |
|
153 | 153 | code.co_code, |
|
154 | 154 | code.co_consts, |
|
155 | 155 | code.co_names, |
|
156 | 156 | code.co_varnames, |
|
157 | 157 | code.co_filename, |
|
158 | 158 | code.co_name, |
|
159 | 159 | code.co_firstlineno, |
|
160 | 160 | code.co_lnotab, |
|
161 | 161 | code.co_freevars, |
|
162 | 162 | code.co_cellvars |
|
163 | 163 | ) |
|
164 | 164 | return FunctionType(new_code, globals(), function.__name__, function.__defaults__) |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | # we still need to run things using the asyncio eventloop, but there is no |
|
168 | 168 | # async integration |
|
169 | 169 | from .async_helpers import (_asyncio_runner, _asyncify, _pseudo_sync_runner) |
|
170 | 170 | from .async_helpers import _curio_runner, _trio_runner, _should_be_async |
|
171 | 171 | |
|
172 | 172 | |
|
173 | 173 | def _ast_asyncify(cell:str, wrapper_name:str) -> ast.Module: |
|
174 | 174 | """ |
|
175 | 175 | Parse a cell with top-level await and modify the AST to be able to run it later. |
|
176 | 176 | |
|
177 | 177 | Parameters |
|
178 | 178 | ---------- |
|
179 | 179 | cell: str |
|
180 | 180 | The code cell to asyncronify |
|
181 | 181 | wrapper_name: str |
|
182 | 182 | The name of the function to be used to wrap the passed `cell`. It is |
|
183 | 183 | advised to **not** use a python identifier in order to not pollute the |
|
184 | 184 | global namespace in which the function will be ran. |
|
185 | 185 | |
|
186 | 186 | Returns |
|
187 | 187 | ------- |
|
188 | 188 | ModuleType: |
|
189 | 189 | A module object AST containing **one** function named `wrapper_name`. |
|
190 | 190 | |
|
191 | 191 | The given code is wrapped in a async-def function, parsed into an AST, and |
|
192 | 192 | the resulting function definition AST is modified to return the last |
|
193 | 193 | expression. |
|
194 | 194 | |
|
195 | 195 | The last expression or await node is moved into a return statement at the |
|
196 | 196 | end of the function, and removed from its original location. If the last |
|
197 | 197 | node is not Expr or Await nothing is done. |
|
198 | 198 | |
|
199 | 199 | The function `__code__` will need to be later modified (by |
|
200 | 200 | ``removed_co_newlocals``) in a subsequent step to not create new `locals()` |
|
201 | 201 | meaning that the local and global scope are the same, ie as if the body of |
|
202 | 202 | the function was at module level. |
|
203 | 203 | |
|
204 | 204 | Lastly a call to `locals()` is made just before the last expression of the |
|
205 | 205 | function, or just after the last assignment or statement to make sure the |
|
206 | 206 | global dict is updated as python function work with a local fast cache which |
|
207 | 207 | is updated only on `local()` calls. |
|
208 | 208 | """ |
|
209 | 209 | |
|
210 | 210 | from ast import Expr, Await, Return |
|
211 | 211 | if sys.version_info >= (3,8): |
|
212 | 212 | return ast.parse(cell) |
|
213 | 213 | tree = ast.parse(_asyncify(cell)) |
|
214 | 214 | |
|
215 | 215 | function_def = tree.body[0] |
|
216 | 216 | function_def.name = wrapper_name |
|
217 | 217 | try_block = function_def.body[0] |
|
218 | 218 | lastexpr = try_block.body[-1] |
|
219 | 219 | if isinstance(lastexpr, (Expr, Await)): |
|
220 | 220 | try_block.body[-1] = Return(lastexpr.value) |
|
221 | 221 | ast.fix_missing_locations(tree) |
|
222 | 222 | return tree |
|
223 | 223 | #----------------------------------------------------------------------------- |
|
224 | 224 | # Globals |
|
225 | 225 | #----------------------------------------------------------------------------- |
|
226 | 226 | |
|
227 | 227 | # compiled regexps for autoindent management |
|
228 | 228 | dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass') |
|
229 | 229 | |
|
230 | 230 | #----------------------------------------------------------------------------- |
|
231 | 231 | # Utilities |
|
232 | 232 | #----------------------------------------------------------------------------- |
|
233 | 233 | |
|
234 | 234 | @undoc |
|
235 | 235 | def softspace(file, newvalue): |
|
236 | 236 | """Copied from code.py, to remove the dependency""" |
|
237 | 237 | |
|
238 | 238 | oldvalue = 0 |
|
239 | 239 | try: |
|
240 | 240 | oldvalue = file.softspace |
|
241 | 241 | except AttributeError: |
|
242 | 242 | pass |
|
243 | 243 | try: |
|
244 | 244 | file.softspace = newvalue |
|
245 | 245 | except (AttributeError, TypeError): |
|
246 | 246 | # "attribute-less object" or "read-only attributes" |
|
247 | 247 | pass |
|
248 | 248 | return oldvalue |
|
249 | 249 | |
|
250 | 250 | @undoc |
|
251 | 251 | def no_op(*a, **kw): |
|
252 | 252 | pass |
|
253 | 253 | |
|
254 | 254 | |
|
255 | 255 | class SpaceInInput(Exception): pass |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def get_default_colors(): |
|
259 | 259 | "DEPRECATED" |
|
260 | 260 | warn('get_default_color is deprecated since IPython 5.0, and returns `Neutral` on all platforms.', |
|
261 | 261 | DeprecationWarning, stacklevel=2) |
|
262 | 262 | return 'Neutral' |
|
263 | 263 | |
|
264 | 264 | |
|
265 | 265 | class SeparateUnicode(Unicode): |
|
266 | 266 | r"""A Unicode subclass to validate separate_in, separate_out, etc. |
|
267 | 267 | |
|
268 | 268 | This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``. |
|
269 | 269 | """ |
|
270 | 270 | |
|
271 | 271 | def validate(self, obj, value): |
|
272 | 272 | if value == '0': value = '' |
|
273 | 273 | value = value.replace('\\n','\n') |
|
274 | 274 | return super(SeparateUnicode, self).validate(obj, value) |
|
275 | 275 | |
|
276 | 276 | |
|
277 | 277 | @undoc |
|
278 | 278 | class DummyMod(object): |
|
279 | 279 | """A dummy module used for IPython's interactive module when |
|
280 | 280 | a namespace must be assigned to the module's __dict__.""" |
|
281 | 281 | __spec__ = None |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | class ExecutionInfo(object): |
|
285 | 285 | """The arguments used for a call to :meth:`InteractiveShell.run_cell` |
|
286 | 286 | |
|
287 | 287 | Stores information about what is going to happen. |
|
288 | 288 | """ |
|
289 | 289 | raw_cell = None |
|
290 | 290 | store_history = False |
|
291 | 291 | silent = False |
|
292 | 292 | shell_futures = True |
|
293 | 293 | |
|
294 | 294 | def __init__(self, raw_cell, store_history, silent, shell_futures): |
|
295 | 295 | self.raw_cell = raw_cell |
|
296 | 296 | self.store_history = store_history |
|
297 | 297 | self.silent = silent |
|
298 | 298 | self.shell_futures = shell_futures |
|
299 | 299 | |
|
300 | 300 | def __repr__(self): |
|
301 | 301 | name = self.__class__.__qualname__ |
|
302 | 302 | raw_cell = ((self.raw_cell[:50] + '..') |
|
303 | 303 | if len(self.raw_cell) > 50 else self.raw_cell) |
|
304 | 304 | return '<%s object at %x, raw_cell="%s" store_history=%s silent=%s shell_futures=%s>' %\ |
|
305 | 305 | (name, id(self), raw_cell, self.store_history, self.silent, self.shell_futures) |
|
306 | 306 | |
|
307 | 307 | |
|
308 | 308 | class ExecutionResult(object): |
|
309 | 309 | """The result of a call to :meth:`InteractiveShell.run_cell` |
|
310 | 310 | |
|
311 | 311 | Stores information about what took place. |
|
312 | 312 | """ |
|
313 | 313 | execution_count = None |
|
314 | 314 | error_before_exec = None |
|
315 | 315 | error_in_exec = None |
|
316 | 316 | info = None |
|
317 | 317 | result = None |
|
318 | 318 | |
|
319 | 319 | def __init__(self, info): |
|
320 | 320 | self.info = info |
|
321 | 321 | |
|
322 | 322 | @property |
|
323 | 323 | def success(self): |
|
324 | 324 | return (self.error_before_exec is None) and (self.error_in_exec is None) |
|
325 | 325 | |
|
326 | 326 | def raise_error(self): |
|
327 | 327 | """Reraises error if `success` is `False`, otherwise does nothing""" |
|
328 | 328 | if self.error_before_exec is not None: |
|
329 | 329 | raise self.error_before_exec |
|
330 | 330 | if self.error_in_exec is not None: |
|
331 | 331 | raise self.error_in_exec |
|
332 | 332 | |
|
333 | 333 | def __repr__(self): |
|
334 | 334 | name = self.__class__.__qualname__ |
|
335 | 335 | return '<%s object at %x, execution_count=%s error_before_exec=%s error_in_exec=%s info=%s result=%s>' %\ |
|
336 | 336 | (name, id(self), self.execution_count, self.error_before_exec, self.error_in_exec, repr(self.info), repr(self.result)) |
|
337 | 337 | |
|
338 | 338 | |
|
339 | 339 | class InteractiveShell(SingletonConfigurable): |
|
340 | 340 | """An enhanced, interactive shell for Python.""" |
|
341 | 341 | |
|
342 | 342 | _instance = None |
|
343 | 343 | |
|
344 | 344 | ast_transformers = List([], help= |
|
345 | 345 | """ |
|
346 | 346 | A list of ast.NodeTransformer subclass instances, which will be applied |
|
347 | 347 | to user input before code is run. |
|
348 | 348 | """ |
|
349 | 349 | ).tag(config=True) |
|
350 | 350 | |
|
351 | 351 | autocall = Enum((0,1,2), default_value=0, help= |
|
352 | 352 | """ |
|
353 | 353 | Make IPython automatically call any callable object even if you didn't |
|
354 | 354 | type explicit parentheses. For example, 'str 43' becomes 'str(43)' |
|
355 | 355 | automatically. The value can be '0' to disable the feature, '1' for |
|
356 | 356 | 'smart' autocall, where it is not applied if there are no more |
|
357 | 357 | arguments on the line, and '2' for 'full' autocall, where all callable |
|
358 | 358 | objects are automatically called (even if no arguments are present). |
|
359 | 359 | """ |
|
360 | 360 | ).tag(config=True) |
|
361 | 361 | |
|
362 | 362 | autoindent = Bool(True, help= |
|
363 | 363 | """ |
|
364 | 364 | Autoindent IPython code entered interactively. |
|
365 | 365 | """ |
|
366 | 366 | ).tag(config=True) |
|
367 | 367 | |
|
368 | 368 | autoawait = Bool(True, help= |
|
369 | 369 | """ |
|
370 | 370 | Automatically run await statement in the top level repl. |
|
371 | 371 | """ |
|
372 | 372 | ).tag(config=True) |
|
373 | 373 | |
|
374 | 374 | loop_runner_map ={ |
|
375 | 375 | 'asyncio':(_asyncio_runner, True), |
|
376 | 376 | 'curio':(_curio_runner, True), |
|
377 | 377 | 'trio':(_trio_runner, True), |
|
378 | 378 | 'sync': (_pseudo_sync_runner, False) |
|
379 | 379 | } |
|
380 | 380 | |
|
381 | 381 | loop_runner = Any(default_value="IPython.core.interactiveshell._asyncio_runner", |
|
382 | 382 | allow_none=True, |
|
383 | 383 | help="""Select the loop runner that will be used to execute top-level asynchronous code""" |
|
384 | 384 | ).tag(config=True) |
|
385 | 385 | |
|
386 | 386 | @default('loop_runner') |
|
387 | 387 | def _default_loop_runner(self): |
|
388 | 388 | return import_item("IPython.core.interactiveshell._asyncio_runner") |
|
389 | 389 | |
|
390 | 390 | @validate('loop_runner') |
|
391 | 391 | def _import_runner(self, proposal): |
|
392 | 392 | if isinstance(proposal.value, str): |
|
393 | 393 | if proposal.value in self.loop_runner_map: |
|
394 | 394 | runner, autoawait = self.loop_runner_map[proposal.value] |
|
395 | 395 | self.autoawait = autoawait |
|
396 | 396 | return runner |
|
397 | 397 | runner = import_item(proposal.value) |
|
398 | 398 | if not callable(runner): |
|
399 | 399 | raise ValueError('loop_runner must be callable') |
|
400 | 400 | return runner |
|
401 | 401 | if not callable(proposal.value): |
|
402 | 402 | raise ValueError('loop_runner must be callable') |
|
403 | 403 | return proposal.value |
|
404 | 404 | |
|
# ---------------------------------------------------------------------
# Configurable traits: class-level declarations that become user
# settings (c.InteractiveShell.<name>) because of .tag(config=True).
# ---------------------------------------------------------------------

# Allow magics to be invoked as ``magic`` instead of ``%magic``.
automagic = Bool(True, help=
    """
    Enable magic commands to be called without the leading %.
    """
).tag(config=True)

# Banner text printed at startup: banner1 before the profile line,
# banner2 after it (assembled by the ``banner`` property below).
banner1 = Unicode(default_banner,
    help="""The part of the banner to be printed before the profile"""
).tag(config=True)
banner2 = Unicode('',
    help="""The part of the banner to be printed after the profile"""
).tag(config=True)

# Size of the Out[] result cache; per the help text, values < 3 are
# coerced to 0 (caching disabled).
cache_size = Integer(1000, help=
    """
    Set the size of the output cache. The default is 1000, you can
    change it permanently in your config file. Setting it to 0 completely
    disables the caching system, and the minimum value accepted is 3 (if
    you provide a value less than 3, it is reset to 0 and a warning is
    issued). This limit is defined because otherwise you'll spend more
    time re-flushing a too small cache than working
    """
).tag(config=True)
color_info = Bool(True, help=
    """
    Use colors for displaying information about objects. Because this
    information is passed through a pager (like 'less'), and some pagers
    get confused with color codes, this capability can be turned off.
    """
).tag(config=True)
colors = CaselessStrEnum(('Neutral', 'NoColor','LightBG','Linux'),
    default_value='Neutral',
    help="Set the color scheme (NoColor, Neutral, Linux, or LightBG)."
).tag(config=True)
debug = Bool(False).tag(config=True)
disable_failing_post_execute = Bool(False,
    help="Don't call post-execute functions that have failed in the past."
).tag(config=True)
# Pluggable display machinery; the *_class Type traits let subclasses or
# frontends substitute their own implementations before init_* runs.
display_formatter = Instance(DisplayFormatter, allow_none=True)
displayhook_class = Type(DisplayHook)
display_pub_class = Type(DisplayPublisher)
compiler_class = Type(CachingCompiler)

# Provisional: rich HTML rendering of docstrings (requires docrepr).
sphinxify_docstring = Bool(False, help=
    """
    Enables rich html representation of docstrings. (This requires the
    docrepr module).
    """).tag(config=True)
|
453 | 453 | |
|
@observe("sphinxify_docstring")
def _sphinxify_docstring_changed(self, change):
    # Emit a provisional-API warning whenever the feature is switched on.
    if change['new']:
        warn("`sphinxify_docstring` is provisional since IPython 5.0 and might change in future versions." , ProvisionalWarning)
|
458 | 458 | |
|
# Provisional: allow text/html representations in mime bundles sent to pagers.
enable_html_pager = Bool(False, help=
    """
    (Provisional API) enables html representation in mime bundles sent
    to pagers.
    """).tag(config=True)

@observe("enable_html_pager")
def _enable_html_pager_changed(self, change):
    # Emit a provisional-API warning whenever the feature is switched on.
    if change['new']:
        warn("`enable_html_pager` is provisional since IPython 5.0 and might change in future versions.", ProvisionalWarning)
|
469 | 469 | |
|
# Class used to publish raw data (set by frontends that support it);
# None disables data publishing entirely (see init_data_pub).
data_pub_class = None

# Set to True to request the mainloop to exit at the next opportunity.
exit_now = Bool(False)
# Callable the user can invoke (``exit``/``quit``) to leave IPython.
exiter = Instance(ExitAutocall)
@default('exiter')
def _exiter_default(self):
    return ExitAutocall(self)
# Monotonically increasing execution counter
execution_count = Integer(1)
filename = Unicode("<ipython console>")
ipython_dir= Unicode('').tag(config=True) # Set to get_ipython_dir() in __init__

# Used to transform cells before running them, and check whether code is complete
input_transformer_manager = Instance('IPython.core.inputtransformer2.TransformerManager',
                                     ())

@property
def input_transformers_cleanup(self):
    # Convenience accessor for the manager's cleanup-stage transforms.
    return self.input_transformer_manager.cleanup_transforms

input_transformers_post = List([],
    help="A list of string input transformers, to be applied after IPython's "
         "own input transformations."
)
|
494 | 494 | |
|
@property
def input_splitter(self):
    """Make this available for backward compatibility (pre-7.0 release) with existing code.

    For example, ipykernel currently uses
    `shell.input_splitter.check_complete`
    """
    # Local import keeps the deprecation machinery off the hot path;
    # warning fires on every attribute access.
    from warnings import warn
    warn("`input_splitter` is deprecated since IPython 7.0, prefer `input_transformer_manager`.",
         DeprecationWarning, stacklevel=2
    )
    return self.input_transformer_manager
|
507 | 507 | |
|
# Logging configuration: logstart enables logging to the default file in
# overwrite mode; logfile/logappend select a specific file (precedence is
# handled in init_logstart).
logstart = Bool(False, help=
    """
    Start logging to the default log file in overwrite mode.
    Use `logappend` to specify a log file to **append** logs to.
    """
).tag(config=True)
logfile = Unicode('', help=
    """
    The name of the logfile to use.
    """
).tag(config=True)
logappend = Unicode('', help=
    """
    Start logging to the given file in append mode.
    Use `logfile` to specify a log file to **overwrite** logs to.
    """
).tag(config=True)
# Verbosity level used by the object inspector (0 = least detail).
object_info_string_level = Enum((0,1,2), default_value=0,
).tag(config=True)
pdb = Bool(False, help=
    """
    Automatically call the pdb debugger after every exception.
    """
).tag(config=True)
display_page = Bool(False,
    help="""If True, anything that would be passed to the pager
    will be displayed as regular output instead."""
).tag(config=True)
|
536 | 536 | |
|
# deprecated prompt traits:
# Kept only so old configuration files do not error; assigning any of
# them triggers _prompt_trait_changed below.

prompt_in1 = Unicode('In [\\#]: ',
    help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
).tag(config=True)
prompt_in2 = Unicode(' .\\D.: ',
    help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
).tag(config=True)
prompt_out = Unicode('Out[\\#]: ',
    help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
).tag(config=True)
prompts_pad_left = Bool(True,
    help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
).tag(config=True)
|
551 | 551 | |
|
@observe('prompt_in1', 'prompt_in2', 'prompt_out', 'prompts_pad_left')
def _prompt_trait_changed(self, change):
    """Warn when any of the deprecated prompt traits is assigned.

    Fix: the observer previously watched the misspelled name
    'prompt_pad_left', so assignments to the actual trait
    ``prompts_pad_left`` (declared above) never triggered this warning.
    """
    name = change['name']
    warn("InteractiveShell.{name} is deprecated since IPython 4.0"
         " and ignored since 5.0, set TerminalInteractiveShell.prompts"
         " object directly.".format(name=name))
|
558 | 558 | |
|
559 | 559 | # protect against weird cases where self.config may not exist: |
|
560 | 560 | |
|
561 | 561 | show_rewritten_input = Bool(True, |
|
562 | 562 | help="Show rewritten input, e.g. for autocall." |
|
563 | 563 | ).tag(config=True) |
|
564 | 564 | |
|
565 | 565 | quiet = Bool(False).tag(config=True) |
|
566 | 566 | |
|
567 | 567 | history_length = Integer(10000, |
|
568 | 568 | help='Total length of command history' |
|
569 | 569 | ).tag(config=True) |
|
570 | 570 | |
|
571 | 571 | history_load_length = Integer(1000, help= |
|
572 | 572 | """ |
|
573 | 573 | The number of saved history entries to be loaded |
|
574 | 574 | into the history buffer at startup. |
|
575 | 575 | """ |
|
576 | 576 | ).tag(config=True) |
|
577 | 577 | |
|
578 | 578 | ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none', 'last_expr_or_assign'], |
|
579 | 579 | default_value='last_expr', |
|
580 | 580 | help=""" |
|
581 | 581 | 'all', 'last', 'last_expr' or 'none', 'last_expr_or_assign' specifying |
|
582 | 582 | which nodes should be run interactively (displaying output from expressions). |
|
583 | 583 | """ |
|
584 | 584 | ).tag(config=True) |
|
585 | 585 | |
|
# TODO: this part of prompt management should be moved to the frontends.
# Use custom TraitTypes that convert '0'->'' and '\\n'->'\n'
separate_in = SeparateUnicode('\n').tag(config=True)
separate_out = SeparateUnicode('').tag(config=True)
separate_out2 = SeparateUnicode('').tag(config=True)
wildcards_case_sensitive = Bool(True).tag(config=True)
# Traceback verbosity mode for the exception handlers.
xmode = CaselessStrEnum(('Context', 'Plain', 'Verbose', 'Minimal'),
    default_value='Context',
    help="Switch modes for the IPython exception handlers."
).tag(config=True)

# Subcomponents of InteractiveShell
# Populated by the init_* methods during __init__; allow_none so the
# traits are valid before initialization completes.
alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True)
prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True)
display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True)
extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True)
payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True)
history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True)
magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True)
|
605 | 605 | magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True) |
|
606 | 606 | |
|
profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True)
@property
def profile(self):
    """Name of the active profile, derived from profile_dir's basename.

    Returns None (implicitly) when no profile directory has been set.
    """
    if self.profile_dir is not None:
        # Directory names follow the 'profile_<name>' convention.
        name = os.path.basename(self.profile_dir.location)
        return name.replace('profile_','')
|
614 | 614 | |
|
# Private interface
_post_execute = Dict()

# Tracks any GUI loop loaded for pylab
pylab_gui_select = None

# NOTE(review): help text is ungrammatical ("Did ... succeeded") — left
# unchanged here because it is a runtime string.
last_execution_succeeded = Bool(True, help='Did last executed command succeeded')

last_execution_result = Instance('IPython.core.interactiveshell.ExecutionResult', help='Result of executing the last command', allow_none=True)
|
624 | 624 | |
|
def __init__(self, ipython_dir=None, profile_dir=None,
             user_module=None, user_ns=None,
             custom_exceptions=((), None), **kwargs):
    """Create the shell and run every init_* stage.

    The order of the init_* calls below matters; the reasons are noted
    inline where they are non-obvious.
    """

    # This is where traits with a config_key argument are updated
    # from the values on config.
    super(InteractiveShell, self).__init__(**kwargs)
    if 'PromptManager' in self.config:
        warn('As of IPython 5.0 `PromptManager` config will have no effect'
             ' and has been replaced by TerminalInteractiveShell.prompts_class')
    self.configurables = [self]

    # These are relatively independent and stateless
    self.init_ipython_dir(ipython_dir)
    self.init_profile_dir(profile_dir)
    self.init_instance_attrs()
    self.init_environment()

    # Check if we're in a virtualenv, and set up sys.path.
    self.init_virtualenv()

    # Create namespaces (user_ns, user_global_ns, etc.)
    self.init_create_namespaces(user_module, user_ns)
    # This has to be done after init_create_namespaces because it uses
    # something in self.user_ns, but before init_sys_modules, which
    # is the first thing to modify sys.
    # TODO: When we override sys.stdout and sys.stderr before this class
    # is created, we are saving the overridden ones here. Not sure if this
    # is what we want to do.
    self.save_sys_module_state()
    self.init_sys_modules()

    # While we're trying to have each part of the code directly access what
    # it needs without keeping redundant references to objects, we have too
    # much legacy code that expects ip.db to exist.
    self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db'))

    self.init_history()
    self.init_encoding()
    self.init_prefilter()

    self.init_syntax_highlighting()
    self.init_hooks()
    self.init_events()
    self.init_pushd_popd_magic()
    self.init_user_ns()
    self.init_logger()
    self.init_builtins()

    # The following was in post_config_initialization
    self.init_inspector()
    self.raw_input_original = input
    self.init_completer()
    # TODO: init_io() needs to happen before init_traceback handlers
    # because the traceback handlers hardcode the stdout/stderr streams.
    # This logic in in debugger.Pdb and should eventually be changed.
    self.init_io()
    self.init_traceback_handlers(custom_exceptions)
    self.init_prompts()
    self.init_display_formatter()
    self.init_display_pub()
    self.init_data_pub()
    self.init_displayhook()
    self.init_magics()
    self.init_alias()
    self.init_logstart()
    self.init_pdb()
    self.init_extension_manager()
    self.init_payload()
    self.init_deprecation_warnings()
    self.hooks.late_startup_hook()
    self.events.trigger('shell_initialized', self)
    # Run cleanup (tempfiles, history flush, ...) at interpreter exit.
    atexit.register(self.atexit_operations)

    # The trio runner is used for running Trio in the foreground thread. It
    # is different from `_trio_runner(async_fn)` in `async_helpers.py`
    # which calls `trio.run()` for every cell. This runner runs all cells
    # inside a single Trio event loop. If used, it is set from
    # `ipykernel.kernelapp`.
    self.trio_runner = None
|
704 | 704 | self.trio_runner = None |
|
705 | 705 | |
|
def get_ipython(self):
    """Return the currently running IPython instance.

    Mirrors the module-level ``get_ipython()`` accessor so code holding
    a shell reference can use the same call.
    """
    return self
|
709 | 709 | |
|
#-------------------------------------------------------------------------
# Trait changed handlers
#-------------------------------------------------------------------------
@observe('ipython_dir')
def _ipython_dir_changed(self, change):
    # Create the new directory on demand so later writes (history db,
    # profiles, ...) do not fail.
    ensure_dir_exists(change['new'])
|
716 | 716 | |
|
def set_autoindent(self,value=None):
    """Set the autoindent flag.

    If called with no arguments, it acts as a toggle."""
    # ``None`` means "flip the current setting"; anything else is stored.
    self.autoindent = (not self.autoindent) if value is None else value
|
725 | 725 | |
|
def set_trio_runner(self, tr):
    # Install the runner that executes cells inside a single Trio event
    # loop (see the note at the end of __init__); set by ipykernel.
    self.trio_runner = tr
|
728 | 728 | |
|
729 | 729 | #------------------------------------------------------------------------- |
|
730 | 730 | # init_* methods called by __init__ |
|
731 | 731 | #------------------------------------------------------------------------- |
|
732 | 732 | |
|
def init_ipython_dir(self, ipython_dir):
    """Record the IPython directory, defaulting to the user's standard one."""
    # An explicit argument wins; otherwise fall back to the conventional
    # per-user location reported by get_ipython_dir().
    self.ipython_dir = ipython_dir if ipython_dir is not None else get_ipython_dir()
|
739 | 739 | |
|
def init_profile_dir(self, profile_dir):
    """Store the profile directory, creating the 'default' profile if needed."""
    if profile_dir is None:
        # No explicit directory given: create (or reuse) the default
        # profile under self.ipython_dir.
        profile_dir = ProfileDir.create_profile_dir_by_name(
            self.ipython_dir, "default"
        )
    self.profile_dir = profile_dir
|
747 | 747 | |
|
def init_instance_attrs(self):
    """Initialize plain (non-trait) per-instance attributes."""
    # Whether the interpreter is awaiting continuation input.
    self.more = False

    # command compiler
    self.compile = self.compiler_class()

    # Make an empty namespace, which extension writers can rely on both
    # existing and NEVER being used by ipython itself. This gives them a
    # convenient location for storing additional information and state
    # their extensions may require, without fear of collisions with other
    # ipython names that may develop later.
    self.meta = Struct()

    # Temporary files used for various purposes. Deleted at exit.
    # The files here are stored with Path from Pathlib
    self.tempfiles = []
    self.tempdirs = []

    # keep track of where we started running (mainly for crash post-mortem)
    # This is not being used anywhere currently.
    self.starting_dir = os.getcwd()

    # Indentation management
    self.indent_current_nsp = 0

    # Dict to track post-execution functions that have been registered
    self._post_execute = {}
|
775 | 775 | |
|
def init_environment(self):
    """Any changes we need to make to the user's environment."""
    # Intentionally a no-op here; subclasses override as needed.
    pass
|
779 | 779 | |
|
def init_encoding(self):
    """Record stdin's encoding at startup, falling back to 'ascii'.

    Certain terminals (like Emacs under Win32) leave the encoding set
    to None, and a replaced sys.stdin may lack the attribute entirely;
    we need a known valid encoding for the raw_input() path.
    """
    detected = getattr(sys.stdin, 'encoding', None)
    self.stdin_encoding = detected or 'ascii'
|
788 | 788 | |
|
789 | 789 | |
|
@observe('colors')
def init_syntax_highlighting(self, changes=None):
    # Python source parser/formatter for syntax highlighting
    # Rebuilt whenever the ``colors`` trait changes so the new scheme
    # takes effect immediately.
    pyformat = PyColorize.Parser(style=self.colors, parent=self).format
    self.pycolorize = lambda src: pyformat(src,'str')

def refresh_style(self):
    # No-op here, used in subclass
    pass
|
799 | 799 | |
|
def init_pushd_popd_magic(self):
    # for pushd/popd management
    self.home_dir = get_home_dir()

    # Directory stack consumed by the %pushd/%popd/%dirs magics.
    self.dir_stack = []
|
805 | 805 | |
|
def init_logger(self):
    # 'rotate' keeps old logs as numbered backups instead of overwriting.
    self.logger = Logger(self.home_dir, logfname='ipython_log.py',
                         logmode='rotate')
|
809 | 809 | |
|
def init_logstart(self):
    """Initialize logging in case it was requested at the command line.
    """
    # Precedence: logappend beats logfile beats the bare logstart flag.
    if self.logappend:
        command = 'logstart %s append' % self.logappend
    elif self.logfile:
        command = 'logstart %s' % self.logfile
    elif self.logstart:
        command = 'logstart'
    else:
        return
    self.magic(command)
|
819 | 819 | |
|
def init_deprecation_warnings(self):
    """
    register default filter for deprecation warning.

    This will allow deprecation warning of function used interactively to show
    warning to users, and still hide deprecation warning from libraries import.
    """
    # Python >= 3.7 already shows DeprecationWarning in __main__ by
    # default (PEP 565), so the explicit filter is only needed earlier.
    if sys.version_info < (3,7):
        warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__"))
|
829 | 829 | |
|
830 | 830 | |
|
def init_builtins(self):
    # A single, static flag that we set to True. Its presence indicates
    # that an IPython shell has been created, and we make no attempts at
    # removing on exit or representing the existence of more than one
    # IPython at a time.
    builtin_mod.__dict__['__IPYTHON__'] = True
    builtin_mod.__dict__['display'] = display

    # Context manager that injects/removes IPython's extra builtins.
    self.builtin_trap = BuiltinTrap(shell=self)
|
840 | 840 | |
|
@observe('colors')
def init_inspector(self, changes=None):
    # Object inspector
    # Recreated whenever ``colors`` changes so ?/?? output matches the
    # active color scheme.
    self.inspector = oinspect.Inspector(oinspect.InspectColors,
                                        PyColorize.ANSICodeColors,
                                        self.colors,
                                        self.object_info_string_level)
|
848 | 848 | |
|
def init_io(self):
    # This will just use sys.stdout and sys.stderr. If you want to
    # override sys.stdout and sys.stderr themselves, you need to do that
    # *before* instantiating this class, because io holds onto
    # references to the underlying streams.
    # io.std* are deprecated, but don't show our own deprecation warnings
    # during initialization of the deprecated API.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        io.stdout = io.IOStream(sys.stdout)
        io.stderr = io.IOStream(sys.stderr)
|
860 | 860 | |
|
def init_prompts(self):
    """Set system prompts, so that scripts can decide if they are
    running interactively."""
    sys.ps1, sys.ps2, sys.ps3 = 'In : ', '...: ', 'Out: '
|
867 | 867 | |
|
def init_display_formatter(self):
    # Converts objects into their mime-bundle representations.
    self.display_formatter = DisplayFormatter(parent=self)
    self.configurables.append(self.display_formatter)

def init_display_pub(self):
    # display_pub_class is a Type trait, so frontends can substitute
    # their own publisher implementation.
    self.display_pub = self.display_pub_class(parent=self, shell=self)
    self.configurables.append(self.display_pub)
|
875 | 875 | |
|
def init_data_pub(self):
    """Instantiate the data publisher, or disable it when no class is set."""
    publisher_cls = self.data_pub_class
    if not publisher_cls:
        # data_pub_class defaults to None: data publishing disabled.
        self.data_pub = None
        return
    self.data_pub = publisher_cls(parent=self)
    self.configurables.append(self.data_pub)
|
882 | 882 | |
|
def init_displayhook(self):
    # Initialize displayhook, set in/out prompts and printing system
    self.displayhook = self.displayhook_class(
        parent=self,
        shell=self,
        cache_size=self.cache_size,
    )
    self.configurables.append(self.displayhook)
    # This is a context manager that installs/removes the displayhook at
    # the appropriate time.
    self.display_trap = DisplayTrap(hook=self.displayhook)
|
894 | 894 | |
|
def init_virtualenv(self):
    """Add the current virtualenv to sys.path so the user can import modules from it.
    This isn't perfect: it doesn't use the Python interpreter with which the
    virtualenv was built, and it ignores the --no-site-packages option. A
    warning will appear suggesting the user installs IPython in the
    virtualenv, but for many cases, it probably works well enough.

    Adapted from code snippets online.

    http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
    """
    if 'VIRTUAL_ENV' not in os.environ:
        # Not in a virtualenv
        return
    elif os.environ["VIRTUAL_ENV"] == "":
        warn("Virtual env path set to '', please check if this is intended.")
        return

    p = Path(sys.executable)
    p_venv = Path(os.environ["VIRTUAL_ENV"])

    # fallback venv detection:
    # stdlib venv may symlink sys.executable, so we can't use realpath.
    # but others can symlink *to* the venv Python, so we can't just use sys.executable.
    # So we just check every item in the symlink tree (generally <= 3)
    paths = [p]
    while p.is_symlink():
        p = Path(os.readlink(p))
        paths.append(p.resolve())

    # In Cygwin paths like "c:\..." and '\cygdrive\c\...' are possible
    # NOTE(review): parts[1] raises IndexError for a bare relative
    # one-component VIRTUAL_ENV path — TODO confirm inputs are absolute.
    if p_venv.parts[1] == "cygdrive":
        drive_name = p_venv.parts[2]
        # str / Path works via Path.__rtruediv__, yielding a Path.
        p_venv = (drive_name + ":/") / Path(*p_venv.parts[3:])

    if any(p_venv == p.parents[1] for p in paths):
        # Our exe is inside or has access to the virtualenv, don't need to do anything.
        return

    # NOTE(review): on win32 virtual_env is a Path while the posix branch
    # produces a str — sys.path then mixes types; confirm downstream is OK.
    if sys.platform == "win32":
        virtual_env = Path(os.environ["VIRTUAL_ENV"], "Lib", "site-packages")
    else:
        virtual_env_path = Path(
            os.environ["VIRTUAL_ENV"], "lib", "python{}.{}", "site-packages"
        )
        p_ver = sys.version_info[:2]

        # Predict version from py[thon]-x.x in the $VIRTUAL_ENV
        re_m = re.search(r"\bpy(?:thon)?([23])\.(\d+)\b", os.environ["VIRTUAL_ENV"])
        if re_m:
            predicted_path = Path(str(virtual_env_path).format(*re_m.groups()))
            if predicted_path.exists():
                p_ver = re_m.groups()

        virtual_env = str(virtual_env_path).format(*p_ver)

    warn(
        "Attempting to work in a virtualenv. If you encounter problems, "
        "please install IPython inside the virtualenv."
    )
    import site
    sys.path.insert(0, virtual_env)
    site.addsitedir(virtual_env)
|
959 | 958 | |
|
960 | 959 | #------------------------------------------------------------------------- |
|
961 | 960 | # Things related to injections into the sys module |
|
962 | 961 | #------------------------------------------------------------------------- |
|
963 | 962 | |
|
def save_sys_module_state(self):
    """Save the state of hooks in the sys module.

    This has to be called after self.user_module is created.
    """
    # Snapshot stream/excepthook references so restore_sys_module_state
    # can undo whatever replacement IPython performs later.
    self._orig_sys_module_state = {
        attr: getattr(sys, attr)
        for attr in ('stdin', 'stdout', 'stderr', 'excepthook')
    }
    main_name = self.user_module.__name__
    self._orig_sys_modules_main_name = main_name
    self._orig_sys_modules_main_mod = sys.modules.get(main_name)
|
975 | 974 | |
|
def restore_sys_module_state(self):
    """Restore the state of the sys module."""
    try:
        # Put back the stream/excepthook references captured by
        # save_sys_module_state.
        for attr_name, saved in self._orig_sys_module_state.items():
            setattr(sys, attr_name, saved)
    except AttributeError:
        pass
    # Reset what was done in self.init_sys_modules
    if self._orig_sys_modules_main_mod is not None:
        sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
|
986 | 985 | |
|
987 | 986 | #------------------------------------------------------------------------- |
|
988 | 987 | # Things related to the banner |
|
989 | 988 | #------------------------------------------------------------------------- |
|
990 | 989 | |
|
@property
def banner(self):
    """Full startup banner: banner1, optional profile line, banner2."""
    banner = self.banner1
    # Only advertise non-default profiles.
    if self.profile and self.profile != 'default':
        banner += '\nIPython profile: %s\n' % self.profile
    if self.banner2:
        banner += '\n' + self.banner2
    return banner
|
999 | 998 | |
|
def show_banner(self, banner=None):
    """Write *banner* (default: the ``banner`` property) to stdout."""
    text = self.banner if banner is None else banner
    sys.stdout.write(text)
|
1004 | 1003 | |
|
1005 | 1004 | #------------------------------------------------------------------------- |
|
1006 | 1005 | # Things related to hooks |
|
1007 | 1006 | #------------------------------------------------------------------------- |
|
1008 | 1007 | |
|
def init_hooks(self):
    # hooks holds pointers used for user-side customizations
    self.hooks = Struct()

    # Per-hook string/regex dispatch tables (see set_hook's str_key/re_key).
    self.strdispatchers = {}

    # Set all default hooks, defined in the IPython.hooks module.
    hooks = IPython.core.hooks
    for hook_name in hooks.__all__:
        # default hooks have priority 100, i.e. low; user hooks should have
        # 0-100 priority
        self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False)

    if self.display_page:
        # Route pager output to regular display instead of a real pager.
        self.set_hook('show_in_pager', page.as_hook(page.display_page), 90)
|
1024 | 1023 | |
|
def set_hook(self,name,hook, priority=50, str_key=None, re_key=None,
             _warn_deprecated=True):
    """set_hook(name,hook) -> sets an internal IPython hook.

    IPython exposes some of its internal API as user-modifiable hooks. By
    adding your function to one of these hooks, you can modify IPython's
    behavior to call at runtime your own routines."""

    # At some point in the future, this should validate the hook before it
    # accepts it. Probably at least check that the hook takes the number
    # of args it's supposed to.

    # Bind the hook so it receives this shell as its first argument.
    f = types.MethodType(hook,self)

    # check if the hook is for strdispatcher first
    if str_key is not None:
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_s(str_key, f, priority )
        self.strdispatchers[name] = sdp
        return
    if re_key is not None:
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_re(re.compile(re_key), f, priority )
        self.strdispatchers[name] = sdp
        return

    dp = getattr(self.hooks, name, None)
    if name not in IPython.core.hooks.__all__:
        print("Warning! Hook '%s' is not one of %s" % \
              (name, IPython.core.hooks.__all__ ))

    if _warn_deprecated and (name in IPython.core.hooks.deprecated):
        alternative = IPython.core.hooks.deprecated[name]
        warn("Hook {} is deprecated. Use {} instead.".format(name, alternative), stacklevel=2)

    if not dp:
        # First hook registered under this name: start a dispatcher chain.
        dp = IPython.core.hooks.CommandChainDispatcher()

    try:
        dp.add(f,priority)
    except AttributeError:
        # it was not commandchain, plain old func - replace
        dp = f

    setattr(self.hooks,name, dp)
|
1070 | 1069 | |
|
1071 | 1070 | #------------------------------------------------------------------------- |
|
1072 | 1071 | # Things related to events |
|
1073 | 1072 | #------------------------------------------------------------------------- |
|
1074 | 1073 | |
|
def init_events(self):
    self.events = EventManager(self, available_events)

    # Clear stale __warningregistry__ entries before each execution.
    self.events.register("pre_execute", self._clear_warning_registry)

def register_post_execute(self, func):
    """DEPRECATED: Use ip.events.register('post_run_cell', func)

    Register a function for calling after code execution.
    """
    warn("ip.register_post_execute is deprecated, use "
         "ip.events.register('post_run_cell', func) instead.", stacklevel=2)
    self.events.register('post_run_cell', func)
|
1088 | 1087 | |
|
1089 | 1088 | def _clear_warning_registry(self): |
|
1090 | 1089 | # clear the warning registry, so that different code blocks with |
|
1091 | 1090 | # overlapping line number ranges don't cause spurious suppression of |
|
1092 | 1091 | # warnings (see gh-6611 for details) |
|
1093 | 1092 | if "__warningregistry__" in self.user_global_ns: |
|
1094 | 1093 | del self.user_global_ns["__warningregistry__"] |
|
1095 | 1094 | |
|
1096 | 1095 | #------------------------------------------------------------------------- |
|
1097 | 1096 | # Things related to the "main" module |
|
1098 | 1097 | #------------------------------------------------------------------------- |
|
1099 | 1098 | |
|
1100 | 1099 | def new_main_mod(self, filename, modname): |
|
1101 | 1100 | """Return a new 'main' module object for user code execution. |
|
1102 | 1101 | |
|
1103 | 1102 | ``filename`` should be the path of the script which will be run in the |
|
1104 | 1103 | module. Requests with the same filename will get the same module, with |
|
1105 | 1104 | its namespace cleared. |
|
1106 | 1105 | |
|
1107 | 1106 | ``modname`` should be the module name - normally either '__main__' or |
|
1108 | 1107 | the basename of the file without the extension. |
|
1109 | 1108 | |
|
1110 | 1109 | When scripts are executed via %run, we must keep a reference to their |
|
1111 | 1110 | __main__ module around so that Python doesn't |
|
1112 | 1111 | clear it, rendering references to module globals useless. |
|
1113 | 1112 | |
|
1114 | 1113 | This method keeps said reference in a private dict, keyed by the |
|
1115 | 1114 | absolute path of the script. This way, for multiple executions of the |
|
1116 | 1115 | same script we only keep one copy of the namespace (the last one), |
|
1117 | 1116 | thus preventing memory leaks from old references while allowing the |
|
1118 | 1117 | objects from the last execution to be accessible. |
|
1119 | 1118 | """ |
|
1120 | 1119 | filename = os.path.abspath(filename) |
|
1121 | 1120 | try: |
|
1122 | 1121 | main_mod = self._main_mod_cache[filename] |
|
1123 | 1122 | except KeyError: |
|
1124 | 1123 | main_mod = self._main_mod_cache[filename] = types.ModuleType( |
|
1125 | 1124 | modname, |
|
1126 | 1125 | doc="Module created for script run in IPython") |
|
1127 | 1126 | else: |
|
1128 | 1127 | main_mod.__dict__.clear() |
|
1129 | 1128 | main_mod.__name__ = modname |
|
1130 | 1129 | |
|
1131 | 1130 | main_mod.__file__ = filename |
|
1132 | 1131 | # It seems pydoc (and perhaps others) needs any module instance to |
|
1133 | 1132 | # implement a __nonzero__ method |
|
1134 | 1133 | main_mod.__nonzero__ = lambda : True |
|
1135 | 1134 | |
|
1136 | 1135 | return main_mod |
|
1137 | 1136 | |
|
1138 | 1137 | def clear_main_mod_cache(self): |
|
1139 | 1138 | """Clear the cache of main modules. |
|
1140 | 1139 | |
|
1141 | 1140 | Mainly for use by utilities like %reset. |
|
1142 | 1141 | |
|
1143 | 1142 | Examples |
|
1144 | 1143 | -------- |
|
1145 | 1144 | In [15]: import IPython |
|
1146 | 1145 | |
|
1147 | 1146 | In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython') |
|
1148 | 1147 | |
|
1149 | 1148 | In [17]: len(_ip._main_mod_cache) > 0 |
|
1150 | 1149 | Out[17]: True |
|
1151 | 1150 | |
|
1152 | 1151 | In [18]: _ip.clear_main_mod_cache() |
|
1153 | 1152 | |
|
1154 | 1153 | In [19]: len(_ip._main_mod_cache) == 0 |
|
1155 | 1154 | Out[19]: True |
|
1156 | 1155 | """ |
|
1157 | 1156 | self._main_mod_cache.clear() |
|
1158 | 1157 | |
|
1159 | 1158 | #------------------------------------------------------------------------- |
|
1160 | 1159 | # Things related to debugging |
|
1161 | 1160 | #------------------------------------------------------------------------- |
|
1162 | 1161 | |
|
    def init_pdb(self):
        """Initialize automatic pdb activation from the ``pdb`` config flag."""
        # Set calling of pdb on exceptions.
        # self.call_pdb is a property; assigning it also propagates the flag
        # to the exception handlers (see _set_call_pdb).
        self.call_pdb = self.pdb
|
1167 | 1166 | |
|
    def _get_call_pdb(self):
        # Getter for the ``call_pdb`` property: whether pdb is entered
        # automatically on uncaught exceptions.
        return self._call_pdb
|
1170 | 1169 | |
|
1171 | 1170 | def _set_call_pdb(self,val): |
|
1172 | 1171 | |
|
1173 | 1172 | if val not in (0,1,False,True): |
|
1174 | 1173 | raise ValueError('new call_pdb value must be boolean') |
|
1175 | 1174 | |
|
1176 | 1175 | # store value in instance |
|
1177 | 1176 | self._call_pdb = val |
|
1178 | 1177 | |
|
1179 | 1178 | # notify the actual exception handlers |
|
1180 | 1179 | self.InteractiveTB.call_pdb = val |
|
1181 | 1180 | |
|
    # Public property wrapping _get_call_pdb/_set_call_pdb, so assigning
    # ``shell.call_pdb`` validates the value and updates the handlers.
    call_pdb = property(_get_call_pdb,_set_call_pdb,None,
                        'Control auto-activation of pdb at exceptions')
|
1184 | 1183 | |
|
1185 | 1184 | def debugger(self,force=False): |
|
1186 | 1185 | """Call the pdb debugger. |
|
1187 | 1186 | |
|
1188 | 1187 | Keywords: |
|
1189 | 1188 | |
|
1190 | 1189 | - force(False): by default, this routine checks the instance call_pdb |
|
1191 | 1190 | flag and does not actually invoke the debugger if the flag is false. |
|
1192 | 1191 | The 'force' option forces the debugger to activate even if the flag |
|
1193 | 1192 | is false. |
|
1194 | 1193 | """ |
|
1195 | 1194 | |
|
1196 | 1195 | if not (force or self.call_pdb): |
|
1197 | 1196 | return |
|
1198 | 1197 | |
|
1199 | 1198 | if not hasattr(sys,'last_traceback'): |
|
1200 | 1199 | error('No traceback has been produced, nothing to debug.') |
|
1201 | 1200 | return |
|
1202 | 1201 | |
|
1203 | 1202 | self.InteractiveTB.debugger(force=True) |
|
1204 | 1203 | |
|
1205 | 1204 | #------------------------------------------------------------------------- |
|
1206 | 1205 | # Things related to IPython's various namespaces |
|
1207 | 1206 | #------------------------------------------------------------------------- |
|
    # True while IPython owns the user namespaces it created itself;
    # init_create_namespaces() flips this to False when an embedder supplies
    # its own user_ns and/or user_module.
    default_user_namespaces = True
|
1209 | 1208 | |
|
    def init_create_namespaces(self, user_module=None, user_ns=None):
        """Create and register the namespaces user code will execute in.

        Parameters
        ----------
        user_module : module, optional
            Module providing the global namespace (used when embedding);
            None means prepare_user_module() builds a fresh one.
        user_ns : dict, optional
            Local namespace for interactive commands; None means the user
            module's __dict__ doubles as the locals.
        """
        # Create the namespace where the user will operate. user_ns is
        # normally the only one used, and it is passed to the exec calls as
        # the locals argument. But we do carry a user_global_ns namespace
        # given as the exec 'globals' argument, This is useful in embedding
        # situations where the ipython shell opens in a context where the
        # distinction between locals and globals is meaningful. For
        # non-embedded contexts, it is just the same object as the user_ns dict.

        # FIXME. For some strange reason, __builtins__ is showing up at user
        # level as a dict instead of a module. This is a manual fix, but I
        # should really track down where the problem is coming from. Alex
        # Schmolck reported this problem first.

        # A useful post by Alex Martelli on this topic:
        # Re: inconsistent value from __builtins__
        # Von: Alex Martelli <aleaxit@yahoo.com>
        # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
        # Gruppen: comp.lang.python

        # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
        # > >>> print type(builtin_check.get_global_binding('__builtins__'))
        # > <type 'dict'>
        # > >>> print type(__builtins__)
        # > <type 'module'>
        # > Is this difference in return value intentional?

        # Well, it's documented that '__builtins__' can be either a dictionary
        # or a module, and it's been that way for a long time. Whether it's
        # intentional (or sensible), I don't know. In any case, the idea is
        # that if you need to access the built-in namespace directly, you
        # should start with "import __builtin__" (note, no 's') which will
        # definitely give you a module. Yeah, it's somewhat confusing:-(.

        # These routines return a properly built module and dict as needed by
        # the rest of the code, and can also be used by extension writers to
        # generate properly initialized namespaces.
        if (user_ns is not None) or (user_module is not None):
            # An embedder supplied namespaces; record that we do not own them.
            self.default_user_namespaces = False
        self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns)

        # A record of hidden variables we have added to the user namespace, so
        # we can list later only variables defined in actual interactive use.
        self.user_ns_hidden = {}

        # Now that FakeModule produces a real module, we've run into a nasty
        # problem: after script execution (via %run), the module where the user
        # code ran is deleted. Now that this object is a true module (needed
        # so doctest and other tools work correctly), the Python module
        # teardown mechanism runs over it, and sets to None every variable
        # present in that module. Top-level references to objects from the
        # script survive, because the user_ns is updated with them. However,
        # calling functions defined in the script that use other things from
        # the script will fail, because the function's closure had references
        # to the original objects, which are now all None. So we must protect
        # these modules from deletion by keeping a cache.
        #
        # To avoid keeping stale modules around (we only need the one from the
        # last run), we use a dict keyed with the full path to the script, so
        # only the last version of the module is held in the cache. Note,
        # however, that we must cache the module *namespace contents* (their
        # __dict__). Because if we try to cache the actual modules, old ones
        # (uncached) could be destroyed while still holding references (such as
        # those held by GUI objects that tend to be long-lived)>
        #
        # The %reset command will flush this cache. See the cache_main_mod()
        # and clear_main_mod_cache() methods for details on use.

        # This is the cache used for 'main' namespaces
        self._main_mod_cache = {}

        # A table holding all the namespaces IPython deals with, so that
        # introspection facilities can search easily.
        self.ns_table = {'user_global':self.user_module.__dict__,
                         'user_local':self.user_ns,
                         'builtin':builtin_mod.__dict__
                         }
|
1287 | 1286 | |
|
    @property
    def user_global_ns(self):
        # The global namespace seen by user code: the __dict__ of the
        # (possibly embedder-supplied) user module.
        return self.user_module.__dict__
|
1291 | 1290 | |
|
1292 | 1291 | def prepare_user_module(self, user_module=None, user_ns=None): |
|
1293 | 1292 | """Prepare the module and namespace in which user code will be run. |
|
1294 | 1293 | |
|
1295 | 1294 | When IPython is started normally, both parameters are None: a new module |
|
1296 | 1295 | is created automatically, and its __dict__ used as the namespace. |
|
1297 | 1296 | |
|
1298 | 1297 | If only user_module is provided, its __dict__ is used as the namespace. |
|
1299 | 1298 | If only user_ns is provided, a dummy module is created, and user_ns |
|
1300 | 1299 | becomes the global namespace. If both are provided (as they may be |
|
1301 | 1300 | when embedding), user_ns is the local namespace, and user_module |
|
1302 | 1301 | provides the global namespace. |
|
1303 | 1302 | |
|
1304 | 1303 | Parameters |
|
1305 | 1304 | ---------- |
|
1306 | 1305 | user_module : module, optional |
|
1307 | 1306 | The current user module in which IPython is being run. If None, |
|
1308 | 1307 | a clean module will be created. |
|
1309 | 1308 | user_ns : dict, optional |
|
1310 | 1309 | A namespace in which to run interactive commands. |
|
1311 | 1310 | |
|
1312 | 1311 | Returns |
|
1313 | 1312 | ------- |
|
1314 | 1313 | A tuple of user_module and user_ns, each properly initialised. |
|
1315 | 1314 | """ |
|
1316 | 1315 | if user_module is None and user_ns is not None: |
|
1317 | 1316 | user_ns.setdefault("__name__", "__main__") |
|
1318 | 1317 | user_module = DummyMod() |
|
1319 | 1318 | user_module.__dict__ = user_ns |
|
1320 | 1319 | |
|
1321 | 1320 | if user_module is None: |
|
1322 | 1321 | user_module = types.ModuleType("__main__", |
|
1323 | 1322 | doc="Automatically created module for IPython interactive environment") |
|
1324 | 1323 | |
|
1325 | 1324 | # We must ensure that __builtin__ (without the final 's') is always |
|
1326 | 1325 | # available and pointing to the __builtin__ *module*. For more details: |
|
1327 | 1326 | # http://mail.python.org/pipermail/python-dev/2001-April/014068.html |
|
1328 | 1327 | user_module.__dict__.setdefault('__builtin__', builtin_mod) |
|
1329 | 1328 | user_module.__dict__.setdefault('__builtins__', builtin_mod) |
|
1330 | 1329 | |
|
1331 | 1330 | if user_ns is None: |
|
1332 | 1331 | user_ns = user_module.__dict__ |
|
1333 | 1332 | |
|
1334 | 1333 | return user_module, user_ns |
|
1335 | 1334 | |
|
1336 | 1335 | def init_sys_modules(self): |
|
1337 | 1336 | # We need to insert into sys.modules something that looks like a |
|
1338 | 1337 | # module but which accesses the IPython namespace, for shelve and |
|
1339 | 1338 | # pickle to work interactively. Normally they rely on getting |
|
1340 | 1339 | # everything out of __main__, but for embedding purposes each IPython |
|
1341 | 1340 | # instance has its own private namespace, so we can't go shoving |
|
1342 | 1341 | # everything into __main__. |
|
1343 | 1342 | |
|
1344 | 1343 | # note, however, that we should only do this for non-embedded |
|
1345 | 1344 | # ipythons, which really mimic the __main__.__dict__ with their own |
|
1346 | 1345 | # namespace. Embedded instances, on the other hand, should not do |
|
1347 | 1346 | # this because they need to manage the user local/global namespaces |
|
1348 | 1347 | # only, but they live within a 'normal' __main__ (meaning, they |
|
1349 | 1348 | # shouldn't overtake the execution environment of the script they're |
|
1350 | 1349 | # embedded in). |
|
1351 | 1350 | |
|
1352 | 1351 | # This is overridden in the InteractiveShellEmbed subclass to a no-op. |
|
1353 | 1352 | main_name = self.user_module.__name__ |
|
1354 | 1353 | sys.modules[main_name] = self.user_module |
|
1355 | 1354 | |
|
1356 | 1355 | def init_user_ns(self): |
|
1357 | 1356 | """Initialize all user-visible namespaces to their minimum defaults. |
|
1358 | 1357 | |
|
1359 | 1358 | Certain history lists are also initialized here, as they effectively |
|
1360 | 1359 | act as user namespaces. |
|
1361 | 1360 | |
|
1362 | 1361 | Notes |
|
1363 | 1362 | ----- |
|
1364 | 1363 | All data structures here are only filled in, they are NOT reset by this |
|
1365 | 1364 | method. If they were not empty before, data will simply be added to |
|
1366 | 1365 | them. |
|
1367 | 1366 | """ |
|
1368 | 1367 | # This function works in two parts: first we put a few things in |
|
1369 | 1368 | # user_ns, and we sync that contents into user_ns_hidden so that these |
|
1370 | 1369 | # initial variables aren't shown by %who. After the sync, we add the |
|
1371 | 1370 | # rest of what we *do* want the user to see with %who even on a new |
|
1372 | 1371 | # session (probably nothing, so they really only see their own stuff) |
|
1373 | 1372 | |
|
1374 | 1373 | # The user dict must *always* have a __builtin__ reference to the |
|
1375 | 1374 | # Python standard __builtin__ namespace, which must be imported. |
|
1376 | 1375 | # This is so that certain operations in prompt evaluation can be |
|
1377 | 1376 | # reliably executed with builtins. Note that we can NOT use |
|
1378 | 1377 | # __builtins__ (note the 's'), because that can either be a dict or a |
|
1379 | 1378 | # module, and can even mutate at runtime, depending on the context |
|
1380 | 1379 | # (Python makes no guarantees on it). In contrast, __builtin__ is |
|
1381 | 1380 | # always a module object, though it must be explicitly imported. |
|
1382 | 1381 | |
|
1383 | 1382 | # For more details: |
|
1384 | 1383 | # http://mail.python.org/pipermail/python-dev/2001-April/014068.html |
|
1385 | 1384 | ns = {} |
|
1386 | 1385 | |
|
1387 | 1386 | # make global variables for user access to the histories |
|
1388 | 1387 | ns['_ih'] = self.history_manager.input_hist_parsed |
|
1389 | 1388 | ns['_oh'] = self.history_manager.output_hist |
|
1390 | 1389 | ns['_dh'] = self.history_manager.dir_hist |
|
1391 | 1390 | |
|
1392 | 1391 | # user aliases to input and output histories. These shouldn't show up |
|
1393 | 1392 | # in %who, as they can have very large reprs. |
|
1394 | 1393 | ns['In'] = self.history_manager.input_hist_parsed |
|
1395 | 1394 | ns['Out'] = self.history_manager.output_hist |
|
1396 | 1395 | |
|
1397 | 1396 | # Store myself as the public api!!! |
|
1398 | 1397 | ns['get_ipython'] = self.get_ipython |
|
1399 | 1398 | |
|
1400 | 1399 | ns['exit'] = self.exiter |
|
1401 | 1400 | ns['quit'] = self.exiter |
|
1402 | 1401 | |
|
1403 | 1402 | # Sync what we've added so far to user_ns_hidden so these aren't seen |
|
1404 | 1403 | # by %who |
|
1405 | 1404 | self.user_ns_hidden.update(ns) |
|
1406 | 1405 | |
|
1407 | 1406 | # Anything put into ns now would show up in %who. Think twice before |
|
1408 | 1407 | # putting anything here, as we really want %who to show the user their |
|
1409 | 1408 | # stuff, not our variables. |
|
1410 | 1409 | |
|
1411 | 1410 | # Finally, update the real user's namespace |
|
1412 | 1411 | self.user_ns.update(ns) |
|
1413 | 1412 | |
|
1414 | 1413 | @property |
|
1415 | 1414 | def all_ns_refs(self): |
|
1416 | 1415 | """Get a list of references to all the namespace dictionaries in which |
|
1417 | 1416 | IPython might store a user-created object. |
|
1418 | 1417 | |
|
1419 | 1418 | Note that this does not include the displayhook, which also caches |
|
1420 | 1419 | objects from the output.""" |
|
1421 | 1420 | return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \ |
|
1422 | 1421 | [m.__dict__ for m in self._main_mod_cache.values()] |
|
1423 | 1422 | |
|
    def reset(self, new_session=True, aggressive=False):
        """Clear all internal namespaces, and attempt to release references to
        user objects.

        If new_session is True, a new history session will be opened.

        Parameters
        ----------
        new_session : bool
            When True (default), also start a new history session and reset
            the execution counter to 1.
        aggressive : bool
            When True, additionally cull sys.modules back to the snapshot
            stored in ``self._sys_modules_keys`` (if one was taken);
            multiprocessing modules are always kept.
        """
        # Clear histories
        self.history_manager.reset(new_session)
        # Reset counter used to index all histories
        if new_session:
            self.execution_count = 1

        # Reset last execution result
        self.last_execution_succeeded = True
        self.last_execution_result = None

        # Flush cached output items
        if self.displayhook.do_full_cache:
            self.displayhook.flush()

        # The main execution namespaces must be cleared very carefully,
        # skipping the deletion of the builtin-related keys, because doing so
        # would cause errors in many object's __del__ methods.
        if self.user_ns is not self.user_global_ns:
            self.user_ns.clear()
        ns = self.user_global_ns
        drop_keys = set(ns.keys())
        drop_keys.discard('__builtin__')
        drop_keys.discard('__builtins__')
        drop_keys.discard('__name__')
        for k in drop_keys:
            del ns[k]

        self.user_ns_hidden.clear()

        # Restore the user namespaces to minimal usability
        self.init_user_ns()
        if aggressive and not hasattr(self, "_sys_modules_keys"):
            print("Cannot restore sys.module, no snapshot")
        elif aggressive:
            print("culling sys module...")
            current_keys = set(sys.modules.keys())
            for k in current_keys - self._sys_modules_keys:
                # Deleting multiprocessing's support modules breaks it;
                # always keep them.
                if k.startswith("multiprocessing"):
                    continue
                del sys.modules[k]

        # Restore the default and user aliases
        self.alias_manager.clear_aliases()
        self.alias_manager.init_aliases()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ('clear', 'more', 'less', 'man'):
                # Don't shadow an existing line magic of the same name.
                if cmd not in self.magics_manager.magics['line']:
                    self.alias_manager.soft_define_alias(cmd, cmd)

        # Flush the private list of module references kept for script
        # execution protection
        self.clear_main_mod_cache()
|
1486 | 1485 | |
|
1487 | 1486 | def del_var(self, varname, by_name=False): |
|
1488 | 1487 | """Delete a variable from the various namespaces, so that, as |
|
1489 | 1488 | far as possible, we're not keeping any hidden references to it. |
|
1490 | 1489 | |
|
1491 | 1490 | Parameters |
|
1492 | 1491 | ---------- |
|
1493 | 1492 | varname : str |
|
1494 | 1493 | The name of the variable to delete. |
|
1495 | 1494 | by_name : bool |
|
1496 | 1495 | If True, delete variables with the given name in each |
|
1497 | 1496 | namespace. If False (default), find the variable in the user |
|
1498 | 1497 | namespace, and delete references to it. |
|
1499 | 1498 | """ |
|
1500 | 1499 | if varname in ('__builtin__', '__builtins__'): |
|
1501 | 1500 | raise ValueError("Refusing to delete %s" % varname) |
|
1502 | 1501 | |
|
1503 | 1502 | ns_refs = self.all_ns_refs |
|
1504 | 1503 | |
|
1505 | 1504 | if by_name: # Delete by name |
|
1506 | 1505 | for ns in ns_refs: |
|
1507 | 1506 | try: |
|
1508 | 1507 | del ns[varname] |
|
1509 | 1508 | except KeyError: |
|
1510 | 1509 | pass |
|
1511 | 1510 | else: # Delete by object |
|
1512 | 1511 | try: |
|
1513 | 1512 | obj = self.user_ns[varname] |
|
1514 | 1513 | except KeyError as e: |
|
1515 | 1514 | raise NameError("name '%s' is not defined" % varname) from e |
|
1516 | 1515 | # Also check in output history |
|
1517 | 1516 | ns_refs.append(self.history_manager.output_hist) |
|
1518 | 1517 | for ns in ns_refs: |
|
1519 | 1518 | to_delete = [n for n, o in ns.items() if o is obj] |
|
1520 | 1519 | for name in to_delete: |
|
1521 | 1520 | del ns[name] |
|
1522 | 1521 | |
|
1523 | 1522 | # Ensure it is removed from the last execution result |
|
1524 | 1523 | if self.last_execution_result.result is obj: |
|
1525 | 1524 | self.last_execution_result = None |
|
1526 | 1525 | |
|
1527 | 1526 | # displayhook keeps extra references, but not in a dictionary |
|
1528 | 1527 | for name in ('_', '__', '___'): |
|
1529 | 1528 | if getattr(self.displayhook, name) is obj: |
|
1530 | 1529 | setattr(self.displayhook, name, None) |
|
1531 | 1530 | |
|
1532 | 1531 | def reset_selective(self, regex=None): |
|
1533 | 1532 | """Clear selective variables from internal namespaces based on a |
|
1534 | 1533 | specified regular expression. |
|
1535 | 1534 | |
|
1536 | 1535 | Parameters |
|
1537 | 1536 | ---------- |
|
1538 | 1537 | regex : string or compiled pattern, optional |
|
1539 | 1538 | A regular expression pattern that will be used in searching |
|
1540 | 1539 | variable names in the users namespaces. |
|
1541 | 1540 | """ |
|
1542 | 1541 | if regex is not None: |
|
1543 | 1542 | try: |
|
1544 | 1543 | m = re.compile(regex) |
|
1545 | 1544 | except TypeError as e: |
|
1546 | 1545 | raise TypeError('regex must be a string or compiled pattern') from e |
|
1547 | 1546 | # Search for keys in each namespace that match the given regex |
|
1548 | 1547 | # If a match is found, delete the key/value pair. |
|
1549 | 1548 | for ns in self.all_ns_refs: |
|
1550 | 1549 | for var in ns: |
|
1551 | 1550 | if m.search(var): |
|
1552 | 1551 | del ns[var] |
|
1553 | 1552 | |
|
    def push(self, variables, interactive=True):
        """Inject a group of variables into the IPython user namespace.

        Parameters
        ----------
        variables : dict, str or list/tuple of str
            The variables to inject into the user's namespace. If a dict, a
            simple update is done. If a str, the string is assumed to have
            variable names separated by spaces. A list/tuple of str can also
            be used to give the variable names. If just the variable names
            are given (list/tuple/str) then the variable values are looked up
            in the caller's frame.
        interactive : bool
            If True (default), the variables will be listed with the ``who``
            magic.

        Raises
        ------
        ValueError
            If ``variables`` is not a dict, str, list or tuple.
        """
        vdict = None

        # We need a dict of name/value pairs to do namespace updates.
        if isinstance(variables, dict):
            vdict = variables
        elif isinstance(variables, (str, list, tuple)):
            if isinstance(variables, str):
                vlist = variables.split()
            else:
                vlist = variables
            vdict = {}
            # NOTE: frame-depth sensitive -- _getframe(1) is push()'s
            # immediate caller, whose globals/locals supply the values.
            cf = sys._getframe(1)
            for name in vlist:
                try:
                    vdict[name] = eval(name, cf.f_globals, cf.f_locals)
                except:
                    # Best-effort: report and continue with the other names.
                    print('Could not get variable %s from %s' %
                           (name,cf.f_code.co_name))
        else:
            raise ValueError('variables must be a dict/str/list/tuple')

        # Propagate variables to user namespace
        self.user_ns.update(vdict)

        # And configure interactive visibility
        user_ns_hidden = self.user_ns_hidden
        if interactive:
            # Make the names visible to %who by removing them from the
            # hidden-names table.
            for name in vdict:
                user_ns_hidden.pop(name, None)
        else:
            user_ns_hidden.update(vdict)
|
1601 | 1600 | |
|
1602 | 1601 | def drop_by_id(self, variables): |
|
1603 | 1602 | """Remove a dict of variables from the user namespace, if they are the |
|
1604 | 1603 | same as the values in the dictionary. |
|
1605 | 1604 | |
|
1606 | 1605 | This is intended for use by extensions: variables that they've added can |
|
1607 | 1606 | be taken back out if they are unloaded, without removing any that the |
|
1608 | 1607 | user has overwritten. |
|
1609 | 1608 | |
|
1610 | 1609 | Parameters |
|
1611 | 1610 | ---------- |
|
1612 | 1611 | variables : dict |
|
1613 | 1612 | A dictionary mapping object names (as strings) to the objects. |
|
1614 | 1613 | """ |
|
1615 | 1614 | for name, obj in variables.items(): |
|
1616 | 1615 | if name in self.user_ns and self.user_ns[name] is obj: |
|
1617 | 1616 | del self.user_ns[name] |
|
1618 | 1617 | self.user_ns_hidden.pop(name, None) |
|
1619 | 1618 | |
|
1620 | 1619 | #------------------------------------------------------------------------- |
|
1621 | 1620 | # Things related to object introspection |
|
1622 | 1621 | #------------------------------------------------------------------------- |
|
1623 | 1622 | |
|
    def _ofind(self, oname, namespaces=None):
        """Find an object in the available namespaces.

        self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic

        Has special code to detect magic functions.

        Parameters
        ----------
        oname : str
            Name to look up; may be dotted ('a.b.c') and may carry a magic
            escape prefix (% or %%).
        namespaces : list of (label, dict), optional
            Namespaces to search, in priority order.  Defaults to user
            locals, user globals, then Python builtins.
        """
        oname = oname.strip()
        # Reject anything that is neither a magic nor a dotted chain of
        # identifiers -- nothing else can be found by this lookup.
        if not oname.startswith(ESC_MAGIC) and \
            not oname.startswith(ESC_MAGIC2) and \
            not all(a.isidentifier() for a in oname.split(".")):
            return {'found': False}

        if namespaces is None:
            # Namespaces to search in:
            # Put them in a list. The order is important so that we
            # find things in the same order that Python finds them.
            namespaces = [ ('Interactive', self.user_ns),
                           ('Interactive (global)', self.user_global_ns),
                           ('Python builtin', builtin_mod.__dict__),
                           ]

        ismagic = False
        isalias = False
        found = False
        ospace = None
        parent = None
        obj = None


        # Look for the given name by splitting it in parts.  If the head is
        # found, then we look for all the remaining parts as members, and only
        # declare success if we can find them all.
        oname_parts = oname.split('.')
        oname_head, oname_rest = oname_parts[0],oname_parts[1:]
        for nsname,ns in namespaces:
            try:
                obj = ns[oname_head]
            except KeyError:
                continue
            else:
                for idx, part in enumerate(oname_rest):
                    try:
                        parent = obj
                        # The last part is looked up in a special way to avoid
                        # descriptor invocation as it may raise or have side
                        # effects.
                        if idx == len(oname_rest) - 1:
                            obj = self._getattr_property(obj, part)
                        else:
                            obj = getattr(obj, part)
                    except:
                        # Blanket except b/c some badly implemented objects
                        # allow __getattr__ to raise exceptions other than
                        # AttributeError, which then crashes IPython.
                        break
                else:
                    # If we finish the for loop (no break), we got all members
                    found = True
                    ospace = nsname
                    break  # namespace loop

        # Try to see if it's magic
        if not found:
            obj = None
            if oname.startswith(ESC_MAGIC2):
                oname = oname.lstrip(ESC_MAGIC2)
                obj = self.find_cell_magic(oname)
            elif oname.startswith(ESC_MAGIC):
                oname = oname.lstrip(ESC_MAGIC)
                obj = self.find_line_magic(oname)
            else:
                # search without prefix, so run? will find %run?
                obj = self.find_line_magic(oname)
                if obj is None:
                    obj = self.find_cell_magic(oname)
            if obj is not None:
                found = True
                ospace = 'IPython internal'
                ismagic = True
                isalias = isinstance(obj, Alias)

        # Last try: special-case some literals like '', [], {}, etc:
        if not found and oname_head in ["''",'""','[]','{}','()']:
            obj = eval(oname_head)
            found = True
            ospace = 'Interactive'

        return {
                'obj':obj,
                'found':found,
                'parent':parent,
                'ismagic':ismagic,
                'isalias':isalias,
                'namespace':ospace
               }
|
1720 | 1719 | |
|
1721 | 1720 | @staticmethod |
|
1722 | 1721 | def _getattr_property(obj, attrname): |
|
1723 | 1722 | """Property-aware getattr to use in object finding. |
|
1724 | 1723 | |
|
1725 | 1724 | If attrname represents a property, return it unevaluated (in case it has |
|
1726 | 1725 | side effects or raises an error. |
|
1727 | 1726 | |
|
1728 | 1727 | """ |
|
1729 | 1728 | if not isinstance(obj, type): |
|
1730 | 1729 | try: |
|
1731 | 1730 | # `getattr(type(obj), attrname)` is not guaranteed to return |
|
1732 | 1731 | # `obj`, but does so for property: |
|
1733 | 1732 | # |
|
1734 | 1733 | # property.__get__(self, None, cls) -> self |
|
1735 | 1734 | # |
|
1736 | 1735 | # The universal alternative is to traverse the mro manually |
|
1737 | 1736 | # searching for attrname in class dicts. |
|
1738 | 1737 | attr = getattr(type(obj), attrname) |
|
1739 | 1738 | except AttributeError: |
|
1740 | 1739 | pass |
|
1741 | 1740 | else: |
|
1742 | 1741 | # This relies on the fact that data descriptors (with both |
|
1743 | 1742 | # __get__ & __set__ magic methods) take precedence over |
|
1744 | 1743 | # instance-level attributes: |
|
1745 | 1744 | # |
|
1746 | 1745 | # class A(object): |
|
1747 | 1746 | # @property |
|
1748 | 1747 | # def foobar(self): return 123 |
|
1749 | 1748 | # a = A() |
|
1750 | 1749 | # a.__dict__['foobar'] = 345 |
|
1751 | 1750 | # a.foobar # == 123 |
|
1752 | 1751 | # |
|
1753 | 1752 | # So, a property may be returned right away. |
|
1754 | 1753 | if isinstance(attr, property): |
|
1755 | 1754 | return attr |
|
1756 | 1755 | |
|
1757 | 1756 | # Nothing helped, fall back. |
|
1758 | 1757 | return getattr(obj, attrname) |
|
1759 | 1758 | |
|
1760 | 1759 | def _object_find(self, oname, namespaces=None): |
|
1761 | 1760 | """Find an object and return a struct with info about it.""" |
|
1762 | 1761 | return Struct(self._ofind(oname, namespaces)) |
|
1763 | 1762 | |
|
1764 | 1763 | def _inspect(self, meth, oname, namespaces=None, **kw): |
|
1765 | 1764 | """Generic interface to the inspector system. |
|
1766 | 1765 | |
|
1767 | 1766 | This function is meant to be called by pdef, pdoc & friends. |
|
1768 | 1767 | """ |
|
1769 | 1768 | info = self._object_find(oname, namespaces) |
|
1770 | 1769 | docformat = sphinxify if self.sphinxify_docstring else None |
|
1771 | 1770 | if info.found: |
|
1772 | 1771 | pmethod = getattr(self.inspector, meth) |
|
1773 | 1772 | # TODO: only apply format_screen to the plain/text repr of the mime |
|
1774 | 1773 | # bundle. |
|
1775 | 1774 | formatter = format_screen if info.ismagic else docformat |
|
1776 | 1775 | if meth == 'pdoc': |
|
1777 | 1776 | pmethod(info.obj, oname, formatter) |
|
1778 | 1777 | elif meth == 'pinfo': |
|
1779 | 1778 | pmethod( |
|
1780 | 1779 | info.obj, |
|
1781 | 1780 | oname, |
|
1782 | 1781 | formatter, |
|
1783 | 1782 | info, |
|
1784 | 1783 | enable_html_pager=self.enable_html_pager, |
|
1785 | 1784 | **kw |
|
1786 | 1785 | ) |
|
1787 | 1786 | else: |
|
1788 | 1787 | pmethod(info.obj, oname) |
|
1789 | 1788 | else: |
|
1790 | 1789 | print('Object `%s` not found.' % oname) |
|
1791 | 1790 | return 'not found' # so callers can take other action |
|
1792 | 1791 | |
|
1793 | 1792 | def object_inspect(self, oname, detail_level=0): |
|
1794 | 1793 | """Get object info about oname""" |
|
1795 | 1794 | with self.builtin_trap: |
|
1796 | 1795 | info = self._object_find(oname) |
|
1797 | 1796 | if info.found: |
|
1798 | 1797 | return self.inspector.info(info.obj, oname, info=info, |
|
1799 | 1798 | detail_level=detail_level |
|
1800 | 1799 | ) |
|
1801 | 1800 | else: |
|
1802 | 1801 | return oinspect.object_info(name=oname, found=False) |
|
1803 | 1802 | |
|
1804 | 1803 | def object_inspect_text(self, oname, detail_level=0): |
|
1805 | 1804 | """Get object info as formatted text""" |
|
1806 | 1805 | return self.object_inspect_mime(oname, detail_level)['text/plain'] |
|
1807 | 1806 | |
|
1808 | 1807 | def object_inspect_mime(self, oname, detail_level=0): |
|
1809 | 1808 | """Get object info as a mimebundle of formatted representations. |
|
1810 | 1809 | |
|
1811 | 1810 | A mimebundle is a dictionary, keyed by mime-type. |
|
1812 | 1811 | It must always have the key `'text/plain'`. |
|
1813 | 1812 | """ |
|
1814 | 1813 | with self.builtin_trap: |
|
1815 | 1814 | info = self._object_find(oname) |
|
1816 | 1815 | if info.found: |
|
1817 | 1816 | return self.inspector._get_info(info.obj, oname, info=info, |
|
1818 | 1817 | detail_level=detail_level |
|
1819 | 1818 | ) |
|
1820 | 1819 | else: |
|
1821 | 1820 | raise KeyError(oname) |
|
1822 | 1821 | |
|
1823 | 1822 | #------------------------------------------------------------------------- |
|
1824 | 1823 | # Things related to history management |
|
1825 | 1824 | #------------------------------------------------------------------------- |
|
1826 | 1825 | |
|
1827 | 1826 | def init_history(self): |
|
1828 | 1827 | """Sets up the command history, and starts regular autosaves.""" |
|
1829 | 1828 | self.history_manager = HistoryManager(shell=self, parent=self) |
|
1830 | 1829 | self.configurables.append(self.history_manager) |
|
1831 | 1830 | |
|
1832 | 1831 | #------------------------------------------------------------------------- |
|
1833 | 1832 | # Things related to exception handling and tracebacks (not debugging) |
|
1834 | 1833 | #------------------------------------------------------------------------- |
|
1835 | 1834 | |
|
1836 | 1835 | debugger_cls = Pdb |
|
1837 | 1836 | |
|
1838 | 1837 | def init_traceback_handlers(self, custom_exceptions): |
|
1839 | 1838 | # Syntax error handler. |
|
1840 | 1839 | self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor', parent=self) |
|
1841 | 1840 | |
|
1842 | 1841 | # The interactive one is initialized with an offset, meaning we always |
|
1843 | 1842 | # want to remove the topmost item in the traceback, which is our own |
|
1844 | 1843 | # internal code. Valid modes: ['Plain','Context','Verbose','Minimal'] |
|
1845 | 1844 | self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain', |
|
1846 | 1845 | color_scheme='NoColor', |
|
1847 | 1846 | tb_offset = 1, |
|
1848 | 1847 | check_cache=check_linecache_ipython, |
|
1849 | 1848 | debugger_cls=self.debugger_cls, parent=self) |
|
1850 | 1849 | |
|
1851 | 1850 | # The instance will store a pointer to the system-wide exception hook, |
|
1852 | 1851 | # so that runtime code (such as magics) can access it. This is because |
|
1853 | 1852 | # during the read-eval loop, it may get temporarily overwritten. |
|
1854 | 1853 | self.sys_excepthook = sys.excepthook |
|
1855 | 1854 | |
|
1856 | 1855 | # and add any custom exception handlers the user may have specified |
|
1857 | 1856 | self.set_custom_exc(*custom_exceptions) |
|
1858 | 1857 | |
|
1859 | 1858 | # Set the exception mode |
|
1860 | 1859 | self.InteractiveTB.set_mode(mode=self.xmode) |
|
1861 | 1860 | |
|
1862 | 1861 | def set_custom_exc(self, exc_tuple, handler): |
|
1863 | 1862 | """set_custom_exc(exc_tuple, handler) |
|
1864 | 1863 | |
|
1865 | 1864 | Set a custom exception handler, which will be called if any of the |
|
1866 | 1865 | exceptions in exc_tuple occur in the mainloop (specifically, in the |
|
1867 | 1866 | run_code() method). |
|
1868 | 1867 | |
|
1869 | 1868 | Parameters |
|
1870 | 1869 | ---------- |
|
1871 | 1870 | |
|
1872 | 1871 | exc_tuple : tuple of exception classes |
|
1873 | 1872 | A *tuple* of exception classes, for which to call the defined |
|
1874 | 1873 | handler. It is very important that you use a tuple, and NOT A |
|
1875 | 1874 | LIST here, because of the way Python's except statement works. If |
|
1876 | 1875 | you only want to trap a single exception, use a singleton tuple:: |
|
1877 | 1876 | |
|
1878 | 1877 | exc_tuple == (MyCustomException,) |
|
1879 | 1878 | |
|
1880 | 1879 | handler : callable |
|
1881 | 1880 | handler must have the following signature:: |
|
1882 | 1881 | |
|
1883 | 1882 | def my_handler(self, etype, value, tb, tb_offset=None): |
|
1884 | 1883 | ... |
|
1885 | 1884 | return structured_traceback |
|
1886 | 1885 | |
|
1887 | 1886 | Your handler must return a structured traceback (a list of strings), |
|
1888 | 1887 | or None. |
|
1889 | 1888 | |
|
1890 | 1889 | This will be made into an instance method (via types.MethodType) |
|
1891 | 1890 | of IPython itself, and it will be called if any of the exceptions |
|
1892 | 1891 | listed in the exc_tuple are caught. If the handler is None, an |
|
1893 | 1892 | internal basic one is used, which just prints basic info. |
|
1894 | 1893 | |
|
1895 | 1894 | To protect IPython from crashes, if your handler ever raises an |
|
1896 | 1895 | exception or returns an invalid result, it will be immediately |
|
1897 | 1896 | disabled. |
|
1898 | 1897 | |
|
1898 | Notes | |
|
1899 | ----- | |
|
1900 | ||
|
1899 | 1901 | WARNING: by putting in your own exception handler into IPython's main |
|
1900 | 1902 | execution loop, you run a very good chance of nasty crashes. This |
|
1901 |
facility should only be used if you really know what you are doing. |
|
|
1903 | facility should only be used if you really know what you are doing. | |
|
1904 | """ | |
|
1905 | ||
|
1902 | 1906 | if not isinstance(exc_tuple, tuple): |
|
1903 | 1907 | raise TypeError("The custom exceptions must be given as a tuple.") |
|
1904 | 1908 | |
|
1905 | 1909 | def dummy_handler(self, etype, value, tb, tb_offset=None): |
|
1906 | 1910 | print('*** Simple custom exception handler ***') |
|
1907 | 1911 | print('Exception type :', etype) |
|
1908 | 1912 | print('Exception value:', value) |
|
1909 | 1913 | print('Traceback :', tb) |
|
1910 | 1914 | |
|
1911 | 1915 | def validate_stb(stb): |
|
1912 | 1916 | """validate structured traceback return type |
|
1913 | 1917 | |
|
1914 | 1918 | return type of CustomTB *should* be a list of strings, but allow |
|
1915 | 1919 | single strings or None, which are harmless. |
|
1916 | 1920 | |
|
1917 | 1921 | This function will *always* return a list of strings, |
|
1918 | 1922 | and will raise a TypeError if stb is inappropriate. |
|
1919 | 1923 | """ |
|
1920 | 1924 | msg = "CustomTB must return list of strings, not %r" % stb |
|
1921 | 1925 | if stb is None: |
|
1922 | 1926 | return [] |
|
1923 | 1927 | elif isinstance(stb, str): |
|
1924 | 1928 | return [stb] |
|
1925 | 1929 | elif not isinstance(stb, list): |
|
1926 | 1930 | raise TypeError(msg) |
|
1927 | 1931 | # it's a list |
|
1928 | 1932 | for line in stb: |
|
1929 | 1933 | # check every element |
|
1930 | 1934 | if not isinstance(line, str): |
|
1931 | 1935 | raise TypeError(msg) |
|
1932 | 1936 | return stb |
|
1933 | 1937 | |
|
1934 | 1938 | if handler is None: |
|
1935 | 1939 | wrapped = dummy_handler |
|
1936 | 1940 | else: |
|
1937 | 1941 | def wrapped(self,etype,value,tb,tb_offset=None): |
|
1938 | 1942 | """wrap CustomTB handler, to protect IPython from user code |
|
1939 | 1943 | |
|
1940 | 1944 | This makes it harder (but not impossible) for custom exception |
|
1941 | 1945 | handlers to crash IPython. |
|
1942 | 1946 | """ |
|
1943 | 1947 | try: |
|
1944 | 1948 | stb = handler(self,etype,value,tb,tb_offset=tb_offset) |
|
1945 | 1949 | return validate_stb(stb) |
|
1946 | 1950 | except: |
|
1947 | 1951 | # clear custom handler immediately |
|
1948 | 1952 | self.set_custom_exc((), None) |
|
1949 | 1953 | print("Custom TB Handler failed, unregistering", file=sys.stderr) |
|
1950 | 1954 | # show the exception in handler first |
|
1951 | 1955 | stb = self.InteractiveTB.structured_traceback(*sys.exc_info()) |
|
1952 | 1956 | print(self.InteractiveTB.stb2text(stb)) |
|
1953 | 1957 | print("The original exception:") |
|
1954 | 1958 | stb = self.InteractiveTB.structured_traceback( |
|
1955 | 1959 | (etype,value,tb), tb_offset=tb_offset |
|
1956 | 1960 | ) |
|
1957 | 1961 | return stb |
|
1958 | 1962 | |
|
1959 | 1963 | self.CustomTB = types.MethodType(wrapped,self) |
|
1960 | 1964 | self.custom_exceptions = exc_tuple |
|
1961 | 1965 | |
|
1962 | 1966 | def excepthook(self, etype, value, tb): |
|
1963 | 1967 | """One more defense for GUI apps that call sys.excepthook. |
|
1964 | 1968 | |
|
1965 | 1969 | GUI frameworks like wxPython trap exceptions and call |
|
1966 | 1970 | sys.excepthook themselves. I guess this is a feature that |
|
1967 | 1971 | enables them to keep running after exceptions that would |
|
1968 | 1972 | otherwise kill their mainloop. This is a bother for IPython |
|
1969 |
which ex |
|
|
1973 | which expects to catch all of the program exceptions with a try: | |
|
1970 | 1974 | except: statement. |
|
1971 | 1975 | |
|
1972 | 1976 | Normally, IPython sets sys.excepthook to a CrashHandler instance, so if |
|
1973 | 1977 | any app directly invokes sys.excepthook, it will look to the user like |
|
1974 | 1978 | IPython crashed. In order to work around this, we can disable the |
|
1975 | 1979 | CrashHandler and replace it with this excepthook instead, which prints a |
|
1976 | 1980 | regular traceback using our InteractiveTB. In this fashion, apps which |
|
1977 | 1981 | call sys.excepthook will generate a regular-looking exception from |
|
1978 | 1982 | IPython, and the CrashHandler will only be triggered by real IPython |
|
1979 | 1983 | crashes. |
|
1980 | 1984 | |
|
1981 | 1985 | This hook should be used sparingly, only in places which are not likely |
|
1982 | 1986 | to be true IPython errors. |
|
1983 | 1987 | """ |
|
1984 | 1988 | self.showtraceback((etype, value, tb), tb_offset=0) |
|
1985 | 1989 | |
|
1986 | 1990 | def _get_exc_info(self, exc_tuple=None): |
|
1987 | 1991 | """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc. |
|
1988 | 1992 | |
|
1989 | 1993 | Ensures sys.last_type,value,traceback hold the exc_info we found, |
|
1990 | 1994 | from whichever source. |
|
1991 | 1995 | |
|
1992 | 1996 | raises ValueError if none of these contain any information |
|
1993 | 1997 | """ |
|
1994 | 1998 | if exc_tuple is None: |
|
1995 | 1999 | etype, value, tb = sys.exc_info() |
|
1996 | 2000 | else: |
|
1997 | 2001 | etype, value, tb = exc_tuple |
|
1998 | 2002 | |
|
1999 | 2003 | if etype is None: |
|
2000 | 2004 | if hasattr(sys, 'last_type'): |
|
2001 | 2005 | etype, value, tb = sys.last_type, sys.last_value, \ |
|
2002 | 2006 | sys.last_traceback |
|
2003 | 2007 | |
|
2004 | 2008 | if etype is None: |
|
2005 | 2009 | raise ValueError("No exception to find") |
|
2006 | 2010 | |
|
2007 | 2011 | # Now store the exception info in sys.last_type etc. |
|
2008 | 2012 | # WARNING: these variables are somewhat deprecated and not |
|
2009 | 2013 | # necessarily safe to use in a threaded environment, but tools |
|
2010 | 2014 | # like pdb depend on their existence, so let's set them. If we |
|
2011 | 2015 | # find problems in the field, we'll need to revisit their use. |
|
2012 | 2016 | sys.last_type = etype |
|
2013 | 2017 | sys.last_value = value |
|
2014 | 2018 | sys.last_traceback = tb |
|
2015 | 2019 | |
|
2016 | 2020 | return etype, value, tb |
|
2017 | 2021 | |
|
2018 | 2022 | def show_usage_error(self, exc): |
|
2019 | 2023 | """Show a short message for UsageErrors |
|
2020 | 2024 | |
|
2021 | 2025 | These are special exceptions that shouldn't show a traceback. |
|
2022 | 2026 | """ |
|
2023 | 2027 | print("UsageError: %s" % exc, file=sys.stderr) |
|
2024 | 2028 | |
|
2025 | 2029 | def get_exception_only(self, exc_tuple=None): |
|
2026 | 2030 | """ |
|
2027 | 2031 | Return as a string (ending with a newline) the exception that |
|
2028 | 2032 | just occurred, without any traceback. |
|
2029 | 2033 | """ |
|
2030 | 2034 | etype, value, tb = self._get_exc_info(exc_tuple) |
|
2031 | 2035 | msg = traceback.format_exception_only(etype, value) |
|
2032 | 2036 | return ''.join(msg) |
|
2033 | 2037 | |
|
2034 | 2038 | def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None, |
|
2035 | 2039 | exception_only=False, running_compiled_code=False): |
|
2036 | 2040 | """Display the exception that just occurred. |
|
2037 | 2041 | |
|
2038 | 2042 | If nothing is known about the exception, this is the method which |
|
2039 | 2043 | should be used throughout the code for presenting user tracebacks, |
|
2040 | 2044 | rather than directly invoking the InteractiveTB object. |
|
2041 | 2045 | |
|
2042 | 2046 | A specific showsyntaxerror() also exists, but this method can take |
|
2043 | 2047 | care of calling it if needed, so unless you are explicitly catching a |
|
2044 | 2048 | SyntaxError exception, don't try to analyze the stack manually and |
|
2045 | 2049 | simply call this method.""" |
|
2046 | 2050 | |
|
2047 | 2051 | try: |
|
2048 | 2052 | try: |
|
2049 | 2053 | etype, value, tb = self._get_exc_info(exc_tuple) |
|
2050 | 2054 | except ValueError: |
|
2051 | 2055 | print('No traceback available to show.', file=sys.stderr) |
|
2052 | 2056 | return |
|
2053 | 2057 | |
|
2054 | 2058 | if issubclass(etype, SyntaxError): |
|
2055 | 2059 | # Though this won't be called by syntax errors in the input |
|
2056 | 2060 | # line, there may be SyntaxError cases with imported code. |
|
2057 | 2061 | self.showsyntaxerror(filename, running_compiled_code) |
|
2058 | 2062 | elif etype is UsageError: |
|
2059 | 2063 | self.show_usage_error(value) |
|
2060 | 2064 | else: |
|
2061 | 2065 | if exception_only: |
|
2062 | 2066 | stb = ['An exception has occurred, use %tb to see ' |
|
2063 | 2067 | 'the full traceback.\n'] |
|
2064 | 2068 | stb.extend(self.InteractiveTB.get_exception_only(etype, |
|
2065 | 2069 | value)) |
|
2066 | 2070 | else: |
|
2067 | 2071 | try: |
|
2068 | 2072 | # Exception classes can customise their traceback - we |
|
2069 | 2073 | # use this in IPython.parallel for exceptions occurring |
|
2070 | 2074 | # in the engines. This should return a list of strings. |
|
2071 | 2075 | stb = value._render_traceback_() |
|
2072 | 2076 | except Exception: |
|
2073 | 2077 | stb = self.InteractiveTB.structured_traceback(etype, |
|
2074 | 2078 | value, tb, tb_offset=tb_offset) |
|
2075 | 2079 | |
|
2076 | 2080 | self._showtraceback(etype, value, stb) |
|
2077 | 2081 | if self.call_pdb: |
|
2078 | 2082 | # drop into debugger |
|
2079 | 2083 | self.debugger(force=True) |
|
2080 | 2084 | return |
|
2081 | 2085 | |
|
2082 | 2086 | # Actually show the traceback |
|
2083 | 2087 | self._showtraceback(etype, value, stb) |
|
2084 | 2088 | |
|
2085 | 2089 | except KeyboardInterrupt: |
|
2086 | 2090 | print('\n' + self.get_exception_only(), file=sys.stderr) |
|
2087 | 2091 | |
|
2088 | def _showtraceback(self, etype, evalue, stb): | |
|
2092 | def _showtraceback(self, etype, evalue, stb: str): | |
|
2089 | 2093 | """Actually show a traceback. |
|
2090 | 2094 | |
|
2091 | 2095 | Subclasses may override this method to put the traceback on a different |
|
2092 | 2096 | place, like a side channel. |
|
2093 | 2097 | """ |
|
2094 |
|
|
|
2098 | val = self.InteractiveTB.stb2text(stb) | |
|
2099 | try: | |
|
2100 | print(val) | |
|
2101 | except UnicodeEncodeError: | |
|
2102 | print(val.encode("utf-8", "backslashreplace").decode()) | |
|
2095 | 2103 | |
|
2096 | 2104 | def showsyntaxerror(self, filename=None, running_compiled_code=False): |
|
2097 | 2105 | """Display the syntax error that just occurred. |
|
2098 | 2106 | |
|
2099 | 2107 | This doesn't display a stack trace because there isn't one. |
|
2100 | 2108 | |
|
2101 | 2109 | If a filename is given, it is stuffed in the exception instead |
|
2102 | 2110 | of what was there before (because Python's parser always uses |
|
2103 | 2111 | "<string>" when reading from a string). |
|
2104 | 2112 | |
|
2105 | 2113 | If the syntax error occurred when running a compiled code (i.e. running_compile_code=True), |
|
2106 | 2114 | longer stack trace will be displayed. |
|
2107 | 2115 | """ |
|
2108 | 2116 | etype, value, last_traceback = self._get_exc_info() |
|
2109 | 2117 | |
|
2110 | 2118 | if filename and issubclass(etype, SyntaxError): |
|
2111 | 2119 | try: |
|
2112 | 2120 | value.filename = filename |
|
2113 | 2121 | except: |
|
2114 | 2122 | # Not the format we expect; leave it alone |
|
2115 | 2123 | pass |
|
2116 | 2124 | |
|
2117 | 2125 | # If the error occurred when executing compiled code, we should provide full stacktrace. |
|
2118 | 2126 | elist = traceback.extract_tb(last_traceback) if running_compiled_code else [] |
|
2119 | 2127 | stb = self.SyntaxTB.structured_traceback(etype, value, elist) |
|
2120 | 2128 | self._showtraceback(etype, value, stb) |
|
2121 | 2129 | |
|
2122 | 2130 | # This is overridden in TerminalInteractiveShell to show a message about |
|
2123 | 2131 | # the %paste magic. |
|
2124 | 2132 | def showindentationerror(self): |
|
2125 | 2133 | """Called by _run_cell when there's an IndentationError in code entered |
|
2126 | 2134 | at the prompt. |
|
2127 | 2135 | |
|
2128 | 2136 | This is overridden in TerminalInteractiveShell to show a message about |
|
2129 | 2137 | the %paste magic.""" |
|
2130 | 2138 | self.showsyntaxerror() |
|
2131 | 2139 | |
|
2132 | 2140 | #------------------------------------------------------------------------- |
|
2133 | 2141 | # Things related to readline |
|
2134 | 2142 | #------------------------------------------------------------------------- |
|
2135 | 2143 | |
|
2136 | 2144 | def init_readline(self): |
|
2137 | 2145 | """DEPRECATED |
|
2138 | 2146 | |
|
2139 | 2147 | Moved to terminal subclass, here only to simplify the init logic.""" |
|
2140 | 2148 | # Set a number of methods that depend on readline to be no-op |
|
2141 | 2149 | warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated', |
|
2142 | 2150 | DeprecationWarning, stacklevel=2) |
|
2143 | 2151 | self.set_custom_completer = no_op |
|
2144 | 2152 | |
|
2145 | 2153 | @skip_doctest |
|
2146 | 2154 | def set_next_input(self, s, replace=False): |
|
2147 | 2155 | """ Sets the 'default' input string for the next command line. |
|
2148 | 2156 | |
|
2149 | 2157 | Example:: |
|
2150 | 2158 | |
|
2151 | 2159 | In [1]: _ip.set_next_input("Hello Word") |
|
2152 | 2160 | In [2]: Hello Word_ # cursor is here |
|
2153 | 2161 | """ |
|
2154 | 2162 | self.rl_next_input = s |
|
2155 | 2163 | |
|
2156 | 2164 | def _indent_current_str(self): |
|
2157 | 2165 | """return the current level of indentation as a string""" |
|
2158 | 2166 | return self.input_splitter.get_indent_spaces() * ' ' |
|
2159 | 2167 | |
|
2160 | 2168 | #------------------------------------------------------------------------- |
|
2161 | 2169 | # Things related to text completion |
|
2162 | 2170 | #------------------------------------------------------------------------- |
|
2163 | 2171 | |
|
2164 | 2172 | def init_completer(self): |
|
2165 | 2173 | """Initialize the completion machinery. |
|
2166 | 2174 | |
|
2167 | 2175 | This creates completion machinery that can be used by client code, |
|
2168 | 2176 | either interactively in-process (typically triggered by the readline |
|
2169 | 2177 | library), programmatically (such as in test suites) or out-of-process |
|
2170 | 2178 | (typically over the network by remote frontends). |
|
2171 | 2179 | """ |
|
2172 | 2180 | from IPython.core.completer import IPCompleter |
|
2173 | 2181 | from IPython.core.completerlib import (module_completer, |
|
2174 | 2182 | magic_run_completer, cd_completer, reset_completer) |
|
2175 | 2183 | |
|
2176 | 2184 | self.Completer = IPCompleter(shell=self, |
|
2177 | 2185 | namespace=self.user_ns, |
|
2178 | 2186 | global_namespace=self.user_global_ns, |
|
2179 | 2187 | parent=self, |
|
2180 | 2188 | ) |
|
2181 | 2189 | self.configurables.append(self.Completer) |
|
2182 | 2190 | |
|
2183 | 2191 | # Add custom completers to the basic ones built into IPCompleter |
|
2184 | 2192 | sdisp = self.strdispatchers.get('complete_command', StrDispatch()) |
|
2185 | 2193 | self.strdispatchers['complete_command'] = sdisp |
|
2186 | 2194 | self.Completer.custom_completers = sdisp |
|
2187 | 2195 | |
|
2188 | 2196 | self.set_hook('complete_command', module_completer, str_key = 'import') |
|
2189 | 2197 | self.set_hook('complete_command', module_completer, str_key = 'from') |
|
2190 | 2198 | self.set_hook('complete_command', module_completer, str_key = '%aimport') |
|
2191 | 2199 | self.set_hook('complete_command', magic_run_completer, str_key = '%run') |
|
2192 | 2200 | self.set_hook('complete_command', cd_completer, str_key = '%cd') |
|
2193 | 2201 | self.set_hook('complete_command', reset_completer, str_key = '%reset') |
|
2194 | 2202 | |
|
2195 | 2203 | @skip_doctest |
|
2196 | 2204 | def complete(self, text, line=None, cursor_pos=None): |
|
2197 | 2205 | """Return the completed text and a list of completions. |
|
2198 | 2206 | |
|
2199 | 2207 | Parameters |
|
2200 | 2208 | ---------- |
|
2201 | 2209 | |
|
2202 | 2210 | text : string |
|
2203 | 2211 | A string of text to be completed on. It can be given as empty and |
|
2204 | 2212 | instead a line/position pair are given. In this case, the |
|
2205 | 2213 | completer itself will split the line like readline does. |
|
2206 | 2214 | |
|
2207 | 2215 | line : string, optional |
|
2208 | 2216 | The complete line that text is part of. |
|
2209 | 2217 | |
|
2210 | 2218 | cursor_pos : int, optional |
|
2211 | 2219 | The position of the cursor on the input line. |
|
2212 | 2220 | |
|
2213 | 2221 | Returns |
|
2214 | 2222 | ------- |
|
2215 | 2223 | text : string |
|
2216 | 2224 | The actual text that was completed. |
|
2217 | 2225 | |
|
2218 | 2226 | matches : list |
|
2219 | 2227 | A sorted list with all possible completions. |
|
2220 | 2228 | |
|
2229 | ||
|
2230 | Notes | |
|
2231 | ----- | |
|
2221 | 2232 | The optional arguments allow the completion to take more context into |
|
2222 | 2233 | account, and are part of the low-level completion API. |
|
2223 | 2234 | |
|
2224 | 2235 | This is a wrapper around the completion mechanism, similar to what |
|
2225 | 2236 | readline does at the command line when the TAB key is hit. By |
|
2226 | 2237 | exposing it as a method, it can be used by other non-readline |
|
2227 | 2238 | environments (such as GUIs) for text completion. |
|
2228 | 2239 | |
|
2229 |
|
|
|
2240 | Examples | |
|
2241 | -------- | |
|
2230 | 2242 | |
|
2231 | 2243 | In [1]: x = 'hello' |
|
2232 | 2244 | |
|
2233 | 2245 | In [2]: _ip.complete('x.l') |
|
2234 | 2246 | Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip']) |
|
2235 | 2247 | """ |
|
2236 | 2248 | |
|
2237 | 2249 | # Inject names into __builtin__ so we can complete on the added names. |
|
2238 | 2250 | with self.builtin_trap: |
|
2239 | 2251 | return self.Completer.complete(text, line, cursor_pos) |
|
2240 | 2252 | |
|
2241 | 2253 | def set_custom_completer(self, completer, pos=0) -> None: |
|
2242 | 2254 | """Adds a new custom completer function. |
|
2243 | 2255 | |
|
2244 | 2256 | The position argument (defaults to 0) is the index in the completers |
|
2245 | 2257 | list where you want the completer to be inserted. |
|
2246 | 2258 | |
|
2247 | 2259 | `completer` should have the following signature:: |
|
2248 | 2260 | |
|
2249 | 2261 | def completion(self: Completer, text: string) -> List[str]: |
|
2250 | 2262 | raise NotImplementedError |
|
2251 | 2263 | |
|
2252 | 2264 | It will be bound to the current Completer instance and pass some text |
|
2253 | 2265 | and return a list with current completions to suggest to the user. |
|
2254 | 2266 | """ |
|
2255 | 2267 | |
|
2256 | 2268 | newcomp = types.MethodType(completer, self.Completer) |
|
2257 | 2269 | self.Completer.custom_matchers.insert(pos,newcomp) |
|
2258 | 2270 | |
|
2259 | 2271 | def set_completer_frame(self, frame=None): |
|
2260 | 2272 | """Set the frame of the completer.""" |
|
2261 | 2273 | if frame: |
|
2262 | 2274 | self.Completer.namespace = frame.f_locals |
|
2263 | 2275 | self.Completer.global_namespace = frame.f_globals |
|
2264 | 2276 | else: |
|
2265 | 2277 | self.Completer.namespace = self.user_ns |
|
2266 | 2278 | self.Completer.global_namespace = self.user_global_ns |
|
2267 | 2279 | |
|
2268 | 2280 | #------------------------------------------------------------------------- |
|
2269 | 2281 | # Things related to magics |
|
2270 | 2282 | #------------------------------------------------------------------------- |
|
2271 | 2283 | |
|
2272 | 2284 | def init_magics(self): |
|
2273 | 2285 | from IPython.core import magics as m |
|
2274 | 2286 | self.magics_manager = magic.MagicsManager(shell=self, |
|
2275 | 2287 | parent=self, |
|
2276 | 2288 | user_magics=m.UserMagics(self)) |
|
2277 | 2289 | self.configurables.append(self.magics_manager) |
|
2278 | 2290 | |
|
2279 | 2291 | # Expose as public API from the magics manager |
|
2280 | 2292 | self.register_magics = self.magics_manager.register |
|
2281 | 2293 | |
|
2282 | 2294 | self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics, |
|
2283 | 2295 | m.ConfigMagics, m.DisplayMagics, m.ExecutionMagics, |
|
2284 | 2296 | m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics, |
|
2285 | 2297 | m.NamespaceMagics, m.OSMagics, m.PackagingMagics, |
|
2286 | 2298 | m.PylabMagics, m.ScriptMagics, |
|
2287 | 2299 | ) |
|
2288 | 2300 | self.register_magics(m.AsyncMagics) |
|
2289 | 2301 | |
|
2290 | 2302 | # Register Magic Aliases |
|
2291 | 2303 | mman = self.magics_manager |
|
2292 | 2304 | # FIXME: magic aliases should be defined by the Magics classes |
|
2293 | 2305 | # or in MagicsManager, not here |
|
2294 | 2306 | mman.register_alias('ed', 'edit') |
|
2295 | 2307 | mman.register_alias('hist', 'history') |
|
2296 | 2308 | mman.register_alias('rep', 'recall') |
|
2297 | 2309 | mman.register_alias('SVG', 'svg', 'cell') |
|
2298 | 2310 | mman.register_alias('HTML', 'html', 'cell') |
|
2299 | 2311 | mman.register_alias('file', 'writefile', 'cell') |
|
2300 | 2312 | |
|
2301 | 2313 | # FIXME: Move the color initialization to the DisplayHook, which |
|
2302 | 2314 | # should be split into a prompt manager and displayhook. We probably |
|
2303 | 2315 | # even need a centralize colors management object. |
|
2304 | 2316 | self.run_line_magic('colors', self.colors) |
|
2305 | 2317 | |
|
2306 | 2318 | # Defined here so that it's included in the documentation |
|
2307 | 2319 | @functools.wraps(magic.MagicsManager.register_function) |
|
2308 | 2320 | def register_magic_function(self, func, magic_kind='line', magic_name=None): |
|
2309 | 2321 | self.magics_manager.register_function( |
|
2310 | 2322 | func, magic_kind=magic_kind, magic_name=magic_name |
|
2311 | 2323 | ) |
|
2312 | 2324 | |
|
2313 | 2325 | def run_line_magic(self, magic_name, line, _stack_depth=1): |
|
2314 | 2326 | """Execute the given line magic. |
|
2315 | 2327 | |
|
2316 | 2328 | Parameters |
|
2317 | 2329 | ---------- |
|
2318 | 2330 | magic_name : str |
|
2319 | 2331 | Name of the desired magic function, without '%' prefix. |
|
2320 | 2332 | line : str |
|
2321 | 2333 | The rest of the input line as a single string. |
|
2322 | 2334 | _stack_depth : int |
|
2323 | 2335 | If run_line_magic() is called from magic() then _stack_depth=2. |
|
2324 | 2336 | This is added to ensure backward compatibility for use of 'get_ipython().magic()' |
|
2325 | 2337 | """ |
|
2326 | 2338 | fn = self.find_line_magic(magic_name) |
|
2327 | 2339 | if fn is None: |
|
2328 | 2340 | cm = self.find_cell_magic(magic_name) |
|
2329 | 2341 | etpl = "Line magic function `%%%s` not found%s." |
|
2330 | 2342 | extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, ' |
|
2331 | 2343 | 'did you mean that instead?)' % magic_name ) |
|
2332 | 2344 | raise UsageError(etpl % (magic_name, extra)) |
|
2333 | 2345 | else: |
|
2334 | 2346 | # Note: this is the distance in the stack to the user's frame. |
|
2335 | 2347 | # This will need to be updated if the internal calling logic gets |
|
2336 | 2348 | # refactored, or else we'll be expanding the wrong variables. |
|
2337 | 2349 | |
|
2338 | 2350 | # Determine stack_depth depending on where run_line_magic() has been called |
|
2339 | 2351 | stack_depth = _stack_depth |
|
2340 | 2352 | if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): |
|
2341 | 2353 | # magic has opted out of var_expand |
|
2342 | 2354 | magic_arg_s = line |
|
2343 | 2355 | else: |
|
2344 | 2356 | magic_arg_s = self.var_expand(line, stack_depth) |
|
2345 | 2357 | # Put magic args in a list so we can call with f(*a) syntax |
|
2346 | 2358 | args = [magic_arg_s] |
|
2347 | 2359 | kwargs = {} |
|
2348 | 2360 | # Grab local namespace if we need it: |
|
2349 | 2361 | if getattr(fn, "needs_local_scope", False): |
|
2350 | 2362 | kwargs['local_ns'] = self.get_local_scope(stack_depth) |
|
2351 | 2363 | with self.builtin_trap: |
|
2352 | 2364 | result = fn(*args, **kwargs) |
|
2353 | 2365 | return result |
|
2354 | 2366 | |
|
2355 | 2367 | def get_local_scope(self, stack_depth): |
|
2356 | 2368 | """Get local scope at given stack depth. |
|
2357 | 2369 | |
|
2358 | 2370 | Parameters |
|
2359 | 2371 | ---------- |
|
2360 | 2372 | stack_depth : int |
|
2361 | 2373 | Depth relative to calling frame |
|
2362 | 2374 | """ |
|
2363 | 2375 | return sys._getframe(stack_depth + 1).f_locals |
|
2364 | 2376 | |
|
2365 | 2377 | def run_cell_magic(self, magic_name, line, cell): |
|
2366 | 2378 | """Execute the given cell magic. |
|
2367 | 2379 | |
|
2368 | 2380 | Parameters |
|
2369 | 2381 | ---------- |
|
2370 | 2382 | magic_name : str |
|
2371 | 2383 | Name of the desired magic function, without '%' prefix. |
|
2372 | 2384 | line : str |
|
2373 | 2385 | The rest of the first input line as a single string. |
|
2374 | 2386 | cell : str |
|
2375 | 2387 | The body of the cell as a (possibly multiline) string. |
|
2376 | 2388 | """ |
|
2377 | 2389 | fn = self.find_cell_magic(magic_name) |
|
2378 | 2390 | if fn is None: |
|
2379 | 2391 | lm = self.find_line_magic(magic_name) |
|
2380 | 2392 | etpl = "Cell magic `%%{0}` not found{1}." |
|
2381 | 2393 | extra = '' if lm is None else (' (But line magic `%{0}` exists, ' |
|
2382 | 2394 | 'did you mean that instead?)'.format(magic_name)) |
|
2383 | 2395 | raise UsageError(etpl.format(magic_name, extra)) |
|
2384 | 2396 | elif cell == '': |
|
2385 | 2397 | message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name) |
|
2386 | 2398 | if self.find_line_magic(magic_name) is not None: |
|
2387 | 2399 | message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name) |
|
2388 | 2400 | raise UsageError(message) |
|
2389 | 2401 | else: |
|
2390 | 2402 | # Note: this is the distance in the stack to the user's frame. |
|
2391 | 2403 | # This will need to be updated if the internal calling logic gets |
|
2392 | 2404 | # refactored, or else we'll be expanding the wrong variables. |
|
2393 | 2405 | stack_depth = 2 |
|
2394 | 2406 | if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): |
|
2395 | 2407 | # magic has opted out of var_expand |
|
2396 | 2408 | magic_arg_s = line |
|
2397 | 2409 | else: |
|
2398 | 2410 | magic_arg_s = self.var_expand(line, stack_depth) |
|
2399 | 2411 | kwargs = {} |
|
2400 | 2412 | if getattr(fn, "needs_local_scope", False): |
|
2401 | 2413 | kwargs['local_ns'] = self.user_ns |
|
2402 | 2414 | |
|
2403 | 2415 | with self.builtin_trap: |
|
2404 | 2416 | args = (magic_arg_s, cell) |
|
2405 | 2417 | result = fn(*args, **kwargs) |
|
2406 | 2418 | return result |
|
2407 | 2419 | |
|
2408 | 2420 | def find_line_magic(self, magic_name): |
|
2409 | 2421 | """Find and return a line magic by name. |
|
2410 | 2422 | |
|
2411 | 2423 | Returns None if the magic isn't found.""" |
|
2412 | 2424 | return self.magics_manager.magics['line'].get(magic_name) |
|
2413 | 2425 | |
|
2414 | 2426 | def find_cell_magic(self, magic_name): |
|
2415 | 2427 | """Find and return a cell magic by name. |
|
2416 | 2428 | |
|
2417 | 2429 | Returns None if the magic isn't found.""" |
|
2418 | 2430 | return self.magics_manager.magics['cell'].get(magic_name) |
|
2419 | 2431 | |
|
2420 | 2432 | def find_magic(self, magic_name, magic_kind='line'): |
|
2421 | 2433 | """Find and return a magic of the given type by name. |
|
2422 | 2434 | |
|
2423 | 2435 | Returns None if the magic isn't found.""" |
|
2424 | 2436 | return self.magics_manager.magics[magic_kind].get(magic_name) |
|
2425 | 2437 | |
|
2426 | 2438 | def magic(self, arg_s): |
|
2427 | 2439 | """DEPRECATED. Use run_line_magic() instead. |
|
2428 | 2440 | |
|
2429 | 2441 | Call a magic function by name. |
|
2430 | 2442 | |
|
2431 | 2443 | Input: a string containing the name of the magic function to call and |
|
2432 | 2444 | any additional arguments to be passed to the magic. |
|
2433 | 2445 | |
|
2434 | 2446 | magic('name -opt foo bar') is equivalent to typing at the ipython |
|
2435 | 2447 | prompt: |
|
2436 | 2448 | |
|
2437 | 2449 | In[1]: %name -opt foo bar |
|
2438 | 2450 | |
|
2439 | 2451 | To call a magic without arguments, simply use magic('name'). |
|
2440 | 2452 | |
|
2441 | 2453 | This provides a proper Python function to call IPython's magics in any |
|
2442 | 2454 | valid Python code you can type at the interpreter, including loops and |
|
2443 | 2455 | compound statements. |
|
2444 | 2456 | """ |
|
2445 | 2457 | # TODO: should we issue a loud deprecation warning here? |
|
2446 | 2458 | magic_name, _, magic_arg_s = arg_s.partition(' ') |
|
2447 | 2459 | magic_name = magic_name.lstrip(prefilter.ESC_MAGIC) |
|
2448 | 2460 | return self.run_line_magic(magic_name, magic_arg_s, _stack_depth=2) |
|
2449 | 2461 | |
|
2450 | 2462 | #------------------------------------------------------------------------- |
|
2451 | 2463 | # Things related to macros |
|
2452 | 2464 | #------------------------------------------------------------------------- |
|
2453 | 2465 | |
|
2454 | 2466 | def define_macro(self, name, themacro): |
|
2455 | 2467 | """Define a new macro |
|
2456 | 2468 | |
|
2457 | 2469 | Parameters |
|
2458 | 2470 | ---------- |
|
2459 | 2471 | name : str |
|
2460 | 2472 | The name of the macro. |
|
2461 | 2473 | themacro : str or Macro |
|
2462 | 2474 | The action to do upon invoking the macro. If a string, a new |
|
2463 | 2475 | Macro object is created by passing the string to it. |
|
2464 | 2476 | """ |
|
2465 | 2477 | |
|
2466 | 2478 | from IPython.core import macro |
|
2467 | 2479 | |
|
2468 | 2480 | if isinstance(themacro, str): |
|
2469 | 2481 | themacro = macro.Macro(themacro) |
|
2470 | 2482 | if not isinstance(themacro, macro.Macro): |
|
2471 | 2483 | raise ValueError('A macro must be a string or a Macro instance.') |
|
2472 | 2484 | self.user_ns[name] = themacro |
|
2473 | 2485 | |
|
2474 | 2486 | #------------------------------------------------------------------------- |
|
2475 | 2487 | # Things related to the running of system commands |
|
2476 | 2488 | #------------------------------------------------------------------------- |
|
2477 | 2489 | |
|
2478 | 2490 | def system_piped(self, cmd): |
|
2479 | 2491 | """Call the given cmd in a subprocess, piping stdout/err |
|
2480 | 2492 | |
|
2481 | 2493 | Parameters |
|
2482 | 2494 | ---------- |
|
2483 | 2495 | cmd : str |
|
2484 | 2496 | Command to execute (can not end in '&', as background processes are |
|
2485 | 2497 | not supported. Should not be a command that expects input |
|
2486 | 2498 | other than simple text. |
|
2487 | 2499 | """ |
|
2488 | 2500 | if cmd.rstrip().endswith('&'): |
|
2489 | 2501 | # this is *far* from a rigorous test |
|
2490 | 2502 | # We do not support backgrounding processes because we either use |
|
2491 | 2503 | # pexpect or pipes to read from. Users can always just call |
|
2492 | 2504 | # os.system() or use ip.system=ip.system_raw |
|
2493 | 2505 | # if they really want a background process. |
|
2494 | 2506 | raise OSError("Background processes not supported.") |
|
2495 | 2507 | |
|
2496 | 2508 | # we explicitly do NOT return the subprocess status code, because |
|
2497 | 2509 | # a non-None value would trigger :func:`sys.displayhook` calls. |
|
2498 | 2510 | # Instead, we store the exit_code in user_ns. |
|
2499 | 2511 | self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1)) |
|
2500 | 2512 | |
|
2501 | 2513 | def system_raw(self, cmd): |
|
2502 | 2514 | """Call the given cmd in a subprocess using os.system on Windows or |
|
2503 | 2515 | subprocess.call using the system shell on other platforms. |
|
2504 | 2516 | |
|
2505 | 2517 | Parameters |
|
2506 | 2518 | ---------- |
|
2507 | 2519 | cmd : str |
|
2508 | 2520 | Command to execute. |
|
2509 | 2521 | """ |
|
2510 | 2522 | cmd = self.var_expand(cmd, depth=1) |
|
2523 | # warn if there is an IPython magic alternative. | |
|
2524 | main_cmd = cmd.split()[0] | |
|
2525 | has_magic_alternatives = ("pip", "conda", "cd", "ls") | |
|
2526 | ||
|
2527 | # had to check if the command was an alias expanded because of `ls` | |
|
2528 | is_alias_expanded = self.alias_manager.is_alias(main_cmd) and ( | |
|
2529 | self.alias_manager.retrieve_alias(main_cmd).strip() == cmd.strip() | |
|
2530 | ) | |
|
2531 | ||
|
2532 | if main_cmd in has_magic_alternatives and not is_alias_expanded: | |
|
2533 | warnings.warn( | |
|
2534 | ( | |
|
2535 | "You executed the system command !{0} which may not work " | |
|
2536 | "as expected. Try the IPython magic %{0} instead." | |
|
2537 | ).format(main_cmd) | |
|
2538 | ) | |
|
2539 | ||
|
2511 | 2540 | # protect os.system from UNC paths on Windows, which it can't handle: |
|
2512 | 2541 | if sys.platform == 'win32': |
|
2513 | 2542 | from IPython.utils._process_win32 import AvoidUNCPath |
|
2514 | 2543 | with AvoidUNCPath() as path: |
|
2515 | 2544 | if path is not None: |
|
2516 | 2545 | cmd = '"pushd %s &&"%s' % (path, cmd) |
|
2517 | 2546 | try: |
|
2518 | 2547 | ec = os.system(cmd) |
|
2519 | 2548 | except KeyboardInterrupt: |
|
2520 | 2549 | print('\n' + self.get_exception_only(), file=sys.stderr) |
|
2521 | 2550 | ec = -2 |
|
2522 | 2551 | else: |
|
2523 | 2552 | # For posix the result of the subprocess.call() below is an exit |
|
2524 | 2553 | # code, which by convention is zero for success, positive for |
|
2525 | 2554 | # program failure. Exit codes above 128 are reserved for signals, |
|
2526 | 2555 | # and the formula for converting a signal to an exit code is usually |
|
2527 | 2556 | # signal_number+128. To more easily differentiate between exit |
|
2528 | 2557 | # codes and signals, ipython uses negative numbers. For instance |
|
2529 | 2558 | # since control-c is signal 2 but exit code 130, ipython's |
|
2530 | 2559 | # _exit_code variable will read -2. Note that some shells like |
|
2531 | 2560 | # csh and fish don't follow sh/bash conventions for exit codes. |
|
2532 | 2561 | executable = os.environ.get('SHELL', None) |
|
2533 | 2562 | try: |
|
2534 | 2563 | # Use env shell instead of default /bin/sh |
|
2535 | 2564 | ec = subprocess.call(cmd, shell=True, executable=executable) |
|
2536 | 2565 | except KeyboardInterrupt: |
|
2537 | 2566 | # intercept control-C; a long traceback is not useful here |
|
2538 | 2567 | print('\n' + self.get_exception_only(), file=sys.stderr) |
|
2539 | 2568 | ec = 130 |
|
2540 | 2569 | if ec > 128: |
|
2541 | 2570 | ec = -(ec - 128) |
|
2542 | 2571 | |
|
2543 | 2572 | # We explicitly do NOT return the subprocess status code, because |
|
2544 | 2573 | # a non-None value would trigger :func:`sys.displayhook` calls. |
|
2545 | 2574 | # Instead, we store the exit_code in user_ns. Note the semantics |
|
2546 | 2575 | # of _exit_code: for control-c, _exit_code == -signal.SIGNIT, |
|
2547 | 2576 | # but raising SystemExit(_exit_code) will give status 254! |
|
2548 | 2577 | self.user_ns['_exit_code'] = ec |
|
2549 | 2578 | |
|
2550 | 2579 | # use piped system by default, because it is better behaved |
|
2551 | 2580 | system = system_piped |
|
2552 | 2581 | |
|
2553 | 2582 | def getoutput(self, cmd, split=True, depth=0): |
|
2554 | 2583 | """Get output (possibly including stderr) from a subprocess. |
|
2555 | 2584 | |
|
2556 | 2585 | Parameters |
|
2557 | 2586 | ---------- |
|
2558 | 2587 | cmd : str |
|
2559 | 2588 | Command to execute (can not end in '&', as background processes are |
|
2560 | 2589 | not supported. |
|
2561 | 2590 | split : bool, optional |
|
2562 | 2591 | If True, split the output into an IPython SList. Otherwise, an |
|
2563 | 2592 | IPython LSString is returned. These are objects similar to normal |
|
2564 | 2593 | lists and strings, with a few convenience attributes for easier |
|
2565 | 2594 | manipulation of line-based output. You can use '?' on them for |
|
2566 | 2595 | details. |
|
2567 | 2596 | depth : int, optional |
|
2568 | 2597 | How many frames above the caller are the local variables which should |
|
2569 | 2598 | be expanded in the command string? The default (0) assumes that the |
|
2570 | 2599 | expansion variables are in the stack frame calling this function. |
|
2571 | 2600 | """ |
|
2572 | 2601 | if cmd.rstrip().endswith('&'): |
|
2573 | 2602 | # this is *far* from a rigorous test |
|
2574 | 2603 | raise OSError("Background processes not supported.") |
|
2575 | 2604 | out = getoutput(self.var_expand(cmd, depth=depth+1)) |
|
2576 | 2605 | if split: |
|
2577 | 2606 | out = SList(out.splitlines()) |
|
2578 | 2607 | else: |
|
2579 | 2608 | out = LSString(out) |
|
2580 | 2609 | return out |
|
2581 | 2610 | |
|
2582 | 2611 | #------------------------------------------------------------------------- |
|
2583 | 2612 | # Things related to aliases |
|
2584 | 2613 | #------------------------------------------------------------------------- |
|
2585 | 2614 | |
|
2586 | 2615 | def init_alias(self): |
|
2587 | 2616 | self.alias_manager = AliasManager(shell=self, parent=self) |
|
2588 | 2617 | self.configurables.append(self.alias_manager) |
|
2589 | 2618 | |
|
2590 | 2619 | #------------------------------------------------------------------------- |
|
2591 | 2620 | # Things related to extensions |
|
2592 | 2621 | #------------------------------------------------------------------------- |
|
2593 | 2622 | |
|
2594 | 2623 | def init_extension_manager(self): |
|
2595 | 2624 | self.extension_manager = ExtensionManager(shell=self, parent=self) |
|
2596 | 2625 | self.configurables.append(self.extension_manager) |
|
2597 | 2626 | |
|
2598 | 2627 | #------------------------------------------------------------------------- |
|
2599 | 2628 | # Things related to payloads |
|
2600 | 2629 | #------------------------------------------------------------------------- |
|
2601 | 2630 | |
|
2602 | 2631 | def init_payload(self): |
|
2603 | 2632 | self.payload_manager = PayloadManager(parent=self) |
|
2604 | 2633 | self.configurables.append(self.payload_manager) |
|
2605 | 2634 | |
|
2606 | 2635 | #------------------------------------------------------------------------- |
|
2607 | 2636 | # Things related to the prefilter |
|
2608 | 2637 | #------------------------------------------------------------------------- |
|
2609 | 2638 | |
|
2610 | 2639 | def init_prefilter(self): |
|
2611 | 2640 | self.prefilter_manager = PrefilterManager(shell=self, parent=self) |
|
2612 | 2641 | self.configurables.append(self.prefilter_manager) |
|
2613 | 2642 | # Ultimately this will be refactored in the new interpreter code, but |
|
2614 | 2643 | # for now, we should expose the main prefilter method (there's legacy |
|
2615 | 2644 | # code out there that may rely on this). |
|
2616 | 2645 | self.prefilter = self.prefilter_manager.prefilter_lines |
|
2617 | 2646 | |
|
2618 | 2647 | def auto_rewrite_input(self, cmd): |
|
2619 | 2648 | """Print to the screen the rewritten form of the user's command. |
|
2620 | 2649 | |
|
2621 | 2650 | This shows visual feedback by rewriting input lines that cause |
|
2622 | 2651 | automatic calling to kick in, like:: |
|
2623 | 2652 | |
|
2624 | 2653 | /f x |
|
2625 | 2654 | |
|
2626 | 2655 | into:: |
|
2627 | 2656 | |
|
2628 | 2657 | ------> f(x) |
|
2629 | 2658 | |
|
2630 | 2659 | after the user's input prompt. This helps the user understand that the |
|
2631 | 2660 | input line was transformed automatically by IPython. |
|
2632 | 2661 | """ |
|
2633 | 2662 | if not self.show_rewritten_input: |
|
2634 | 2663 | return |
|
2635 | 2664 | |
|
2636 | 2665 | # This is overridden in TerminalInteractiveShell to use fancy prompts |
|
2637 | 2666 | print("------> " + cmd) |
|
2638 | 2667 | |
|
2639 | 2668 | #------------------------------------------------------------------------- |
|
2640 | 2669 | # Things related to extracting values/expressions from kernel and user_ns |
|
2641 | 2670 | #------------------------------------------------------------------------- |
|
2642 | 2671 | |
|
2643 | 2672 | def _user_obj_error(self): |
|
2644 | 2673 | """return simple exception dict |
|
2645 | 2674 | |
|
2646 | 2675 | for use in user_expressions |
|
2647 | 2676 | """ |
|
2648 | 2677 | |
|
2649 | 2678 | etype, evalue, tb = self._get_exc_info() |
|
2650 | 2679 | stb = self.InteractiveTB.get_exception_only(etype, evalue) |
|
2651 | 2680 | |
|
2652 | 2681 | exc_info = { |
|
2653 | 2682 | "status": "error", |
|
2654 | 2683 | "traceback": stb, |
|
2655 | 2684 | "ename": etype.__name__, |
|
2656 | 2685 | "evalue": py3compat.safe_unicode(evalue), |
|
2657 | 2686 | } |
|
2658 | 2687 | |
|
2659 | 2688 | return exc_info |
|
2660 | 2689 | |
|
2661 | 2690 | def _format_user_obj(self, obj): |
|
2662 | 2691 | """format a user object to display dict |
|
2663 | 2692 | |
|
2664 | 2693 | for use in user_expressions |
|
2665 | 2694 | """ |
|
2666 | 2695 | |
|
2667 | 2696 | data, md = self.display_formatter.format(obj) |
|
2668 | 2697 | value = { |
|
2669 | 2698 | 'status' : 'ok', |
|
2670 | 2699 | 'data' : data, |
|
2671 | 2700 | 'metadata' : md, |
|
2672 | 2701 | } |
|
2673 | 2702 | return value |
|
2674 | 2703 | |
|
2675 | 2704 | def user_expressions(self, expressions): |
|
2676 | 2705 | """Evaluate a dict of expressions in the user's namespace. |
|
2677 | 2706 | |
|
2678 | 2707 | Parameters |
|
2679 | 2708 | ---------- |
|
2680 | 2709 | expressions : dict |
|
2681 | 2710 | A dict with string keys and string values. The expression values |
|
2682 | 2711 | should be valid Python expressions, each of which will be evaluated |
|
2683 | 2712 | in the user namespace. |
|
2684 | 2713 | |
|
2685 | 2714 | Returns |
|
2686 | 2715 | ------- |
|
2687 | 2716 | A dict, keyed like the input expressions dict, with the rich mime-typed |
|
2688 | 2717 | display_data of each value. |
|
2689 | 2718 | """ |
|
2690 | 2719 | out = {} |
|
2691 | 2720 | user_ns = self.user_ns |
|
2692 | 2721 | global_ns = self.user_global_ns |
|
2693 | 2722 | |
|
2694 | 2723 | for key, expr in expressions.items(): |
|
2695 | 2724 | try: |
|
2696 | 2725 | value = self._format_user_obj(eval(expr, global_ns, user_ns)) |
|
2697 | 2726 | except: |
|
2698 | 2727 | value = self._user_obj_error() |
|
2699 | 2728 | out[key] = value |
|
2700 | 2729 | return out |
|
2701 | 2730 | |
|
2702 | 2731 | #------------------------------------------------------------------------- |
|
2703 | 2732 | # Things related to the running of code |
|
2704 | 2733 | #------------------------------------------------------------------------- |
|
2705 | 2734 | |
|
2706 | 2735 | def ex(self, cmd): |
|
2707 | 2736 | """Execute a normal python statement in user namespace.""" |
|
2708 | 2737 | with self.builtin_trap: |
|
2709 | 2738 | exec(cmd, self.user_global_ns, self.user_ns) |
|
2710 | 2739 | |
|
2711 | 2740 | def ev(self, expr): |
|
2712 | 2741 | """Evaluate python expression expr in user namespace. |
|
2713 | 2742 | |
|
2714 | 2743 | Returns the result of evaluation |
|
2715 | 2744 | """ |
|
2716 | 2745 | with self.builtin_trap: |
|
2717 | 2746 | return eval(expr, self.user_global_ns, self.user_ns) |
|
2718 | 2747 | |
|
2719 | 2748 | def safe_execfile(self, fname, *where, exit_ignore=False, raise_exceptions=False, shell_futures=False): |
|
2720 | 2749 | """A safe version of the builtin execfile(). |
|
2721 | 2750 | |
|
2722 | 2751 | This version will never throw an exception, but instead print |
|
2723 | 2752 | helpful error messages to the screen. This only works on pure |
|
2724 | 2753 | Python files with the .py extension. |
|
2725 | 2754 | |
|
2726 | 2755 | Parameters |
|
2727 | 2756 | ---------- |
|
2728 | 2757 | fname : string |
|
2729 | 2758 | The name of the file to be executed. |
|
2730 | 2759 | where : tuple |
|
2731 | 2760 | One or two namespaces, passed to execfile() as (globals,locals). |
|
2732 | 2761 | If only one is given, it is passed as both. |
|
2733 | 2762 | exit_ignore : bool (False) |
|
2734 | 2763 | If True, then silence SystemExit for non-zero status (it is always |
|
2735 | 2764 | silenced for zero status, as it is so common). |
|
2736 | 2765 | raise_exceptions : bool (False) |
|
2737 | 2766 | If True raise exceptions everywhere. Meant for testing. |
|
2738 | 2767 | shell_futures : bool (False) |
|
2739 | 2768 | If True, the code will share future statements with the interactive |
|
2740 | 2769 | shell. It will both be affected by previous __future__ imports, and |
|
2741 | 2770 | any __future__ imports in the code will affect the shell. If False, |
|
2742 | 2771 | __future__ imports are not shared in either direction. |
|
2743 | 2772 | |
|
2744 | 2773 | """ |
|
2745 | 2774 | fname = Path(fname).expanduser().resolve() |
|
2746 | 2775 | |
|
2747 | 2776 | # Make sure we can open the file |
|
2748 | 2777 | try: |
|
2749 | 2778 | with fname.open(): |
|
2750 | 2779 | pass |
|
2751 | 2780 | except: |
|
2752 | 2781 | warn('Could not open file <%s> for safe execution.' % fname) |
|
2753 | 2782 | return |
|
2754 | 2783 | |
|
2755 | 2784 | # Find things also in current directory. This is needed to mimic the |
|
2756 | 2785 | # behavior of running a script from the system command line, where |
|
2757 | 2786 | # Python inserts the script's directory into sys.path |
|
2758 | 2787 | dname = str(fname.parent) |
|
2759 | 2788 | |
|
2760 | 2789 | with prepended_to_syspath(dname), self.builtin_trap: |
|
2761 | 2790 | try: |
|
2762 | 2791 | glob, loc = (where + (None, ))[:2] |
|
2763 | 2792 | py3compat.execfile( |
|
2764 | 2793 | fname, glob, loc, |
|
2765 | 2794 | self.compile if shell_futures else None) |
|
2766 | 2795 | except SystemExit as status: |
|
2767 | 2796 | # If the call was made with 0 or None exit status (sys.exit(0) |
|
2768 | 2797 | # or sys.exit() ), don't bother showing a traceback, as both of |
|
2769 | 2798 | # these are considered normal by the OS: |
|
2770 | 2799 | # > python -c'import sys;sys.exit(0)'; echo $? |
|
2771 | 2800 | # 0 |
|
2772 | 2801 | # > python -c'import sys;sys.exit()'; echo $? |
|
2773 | 2802 | # 0 |
|
2774 | 2803 | # For other exit status, we show the exception unless |
|
2775 | 2804 | # explicitly silenced, but only in short form. |
|
2776 | 2805 | if status.code: |
|
2777 | 2806 | if raise_exceptions: |
|
2778 | 2807 | raise |
|
2779 | 2808 | if not exit_ignore: |
|
2780 | 2809 | self.showtraceback(exception_only=True) |
|
2781 | 2810 | except: |
|
2782 | 2811 | if raise_exceptions: |
|
2783 | 2812 | raise |
|
2784 | 2813 | # tb offset is 2 because we wrap execfile |
|
2785 | 2814 | self.showtraceback(tb_offset=2) |
|
2786 | 2815 | |
|
2787 | 2816 | def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False): |
|
2788 | 2817 | """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax. |
|
2789 | 2818 | |
|
2790 | 2819 | Parameters |
|
2791 | 2820 | ---------- |
|
2792 | 2821 | fname : str |
|
2793 | 2822 | The name of the file to execute. The filename must have a |
|
2794 | 2823 | .ipy or .ipynb extension. |
|
2795 | 2824 | shell_futures : bool (False) |
|
2796 | 2825 | If True, the code will share future statements with the interactive |
|
2797 | 2826 | shell. It will both be affected by previous __future__ imports, and |
|
2798 | 2827 | any __future__ imports in the code will affect the shell. If False, |
|
2799 | 2828 | __future__ imports are not shared in either direction. |
|
2800 | 2829 | raise_exceptions : bool (False) |
|
2801 | 2830 | If True raise exceptions everywhere. Meant for testing. |
|
2802 | 2831 | """ |
|
2803 | 2832 | fname = Path(fname).expanduser().resolve() |
|
2804 | 2833 | |
|
2805 | 2834 | # Make sure we can open the file |
|
2806 | 2835 | try: |
|
2807 | 2836 | with fname.open(): |
|
2808 | 2837 | pass |
|
2809 | 2838 | except: |
|
2810 | 2839 | warn('Could not open file <%s> for safe execution.' % fname) |
|
2811 | 2840 | return |
|
2812 | 2841 | |
|
2813 | 2842 | # Find things also in current directory. This is needed to mimic the |
|
2814 | 2843 | # behavior of running a script from the system command line, where |
|
2815 | 2844 | # Python inserts the script's directory into sys.path |
|
2816 | 2845 | dname = str(fname.parent) |
|
2817 | 2846 | |
|
2818 | 2847 | def get_cells(): |
|
2819 | 2848 | """generator for sequence of code blocks to run""" |
|
2820 | 2849 | if fname.suffix == ".ipynb": |
|
2821 | 2850 | from nbformat import read |
|
2822 | 2851 | nb = read(fname, as_version=4) |
|
2823 | 2852 | if not nb.cells: |
|
2824 | 2853 | return |
|
2825 | 2854 | for cell in nb.cells: |
|
2826 | 2855 | if cell.cell_type == 'code': |
|
2827 | 2856 | yield cell.source |
|
2828 | 2857 | else: |
|
2829 | 2858 | yield fname.read_text() |
|
2830 | 2859 | |
|
2831 | 2860 | with prepended_to_syspath(dname): |
|
2832 | 2861 | try: |
|
2833 | 2862 | for cell in get_cells(): |
|
2834 | 2863 | result = self.run_cell(cell, silent=True, shell_futures=shell_futures) |
|
2835 | 2864 | if raise_exceptions: |
|
2836 | 2865 | result.raise_error() |
|
2837 | 2866 | elif not result.success: |
|
2838 | 2867 | break |
|
2839 | 2868 | except: |
|
2840 | 2869 | if raise_exceptions: |
|
2841 | 2870 | raise |
|
2842 | 2871 | self.showtraceback() |
|
2843 | 2872 | warn('Unknown failure executing file: <%s>' % fname) |
|
2844 | 2873 | |
|
2845 | 2874 | def safe_run_module(self, mod_name, where): |
|
2846 | 2875 | """A safe version of runpy.run_module(). |
|
2847 | 2876 | |
|
2848 | 2877 | This version will never throw an exception, but instead print |
|
2849 | 2878 | helpful error messages to the screen. |
|
2850 | 2879 | |
|
2851 | 2880 | `SystemExit` exceptions with status code 0 or None are ignored. |
|
2852 | 2881 | |
|
2853 | 2882 | Parameters |
|
2854 | 2883 | ---------- |
|
2855 | 2884 | mod_name : string |
|
2856 | 2885 | The name of the module to be executed. |
|
2857 | 2886 | where : dict |
|
2858 | 2887 | The globals namespace. |
|
2859 | 2888 | """ |
|
2860 | 2889 | try: |
|
2861 | 2890 | try: |
|
2862 | 2891 | where.update( |
|
2863 | 2892 | runpy.run_module(str(mod_name), run_name="__main__", |
|
2864 | 2893 | alter_sys=True) |
|
2865 | 2894 | ) |
|
2866 | 2895 | except SystemExit as status: |
|
2867 | 2896 | if status.code: |
|
2868 | 2897 | raise |
|
2869 | 2898 | except: |
|
2870 | 2899 | self.showtraceback() |
|
2871 | 2900 | warn('Unknown failure executing module: <%s>' % mod_name) |
|
2872 | 2901 | |
|
2873 | 2902 | def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True): |
|
2874 | 2903 | """Run a complete IPython cell. |
|
2875 | 2904 | |
|
2876 | 2905 | Parameters |
|
2877 | 2906 | ---------- |
|
2878 | 2907 | raw_cell : str |
|
2879 | 2908 | The code (including IPython code such as %magic functions) to run. |
|
2880 | 2909 | store_history : bool |
|
2881 | 2910 | If True, the raw and translated cell will be stored in IPython's |
|
2882 | 2911 | history. For user code calling back into IPython's machinery, this |
|
2883 | 2912 | should be set to False. |
|
2884 | 2913 | silent : bool |
|
2885 | 2914 | If True, avoid side-effects, such as implicit displayhooks and |
|
2886 | 2915 | and logging. silent=True forces store_history=False. |
|
2887 | 2916 | shell_futures : bool |
|
2888 | 2917 | If True, the code will share future statements with the interactive |
|
2889 | 2918 | shell. It will both be affected by previous __future__ imports, and |
|
2890 | 2919 | any __future__ imports in the code will affect the shell. If False, |
|
2891 | 2920 | __future__ imports are not shared in either direction. |
|
2892 | 2921 | |
|
2893 | 2922 | Returns |
|
2894 | 2923 | ------- |
|
2895 | 2924 | result : :class:`ExecutionResult` |
|
2896 | 2925 | """ |
|
2897 | 2926 | result = None |
|
2898 | 2927 | try: |
|
2899 | 2928 | result = self._run_cell( |
|
2900 | 2929 | raw_cell, store_history, silent, shell_futures) |
|
2901 | 2930 | finally: |
|
2902 | 2931 | self.events.trigger('post_execute') |
|
2903 | 2932 | if not silent: |
|
2904 | 2933 | self.events.trigger('post_run_cell', result) |
|
2905 | 2934 | return result |
|
2906 | 2935 | |
|
2907 | 2936 | def _run_cell(self, raw_cell:str, store_history:bool, silent:bool, shell_futures:bool) -> ExecutionResult: |
|
2908 | 2937 | """Internal method to run a complete IPython cell.""" |
|
2909 | 2938 | |
|
2910 | 2939 | # we need to avoid calling self.transform_cell multiple time on the same thing |
|
2911 | 2940 | # so we need to store some results: |
|
2912 | 2941 | preprocessing_exc_tuple = None |
|
2913 | 2942 | try: |
|
2914 | 2943 | transformed_cell = self.transform_cell(raw_cell) |
|
2915 | 2944 | except Exception: |
|
2916 | 2945 | transformed_cell = raw_cell |
|
2917 | 2946 | preprocessing_exc_tuple = sys.exc_info() |
|
2918 | 2947 | |
|
2919 | 2948 | assert transformed_cell is not None |
|
2920 | 2949 | coro = self.run_cell_async( |
|
2921 | 2950 | raw_cell, |
|
2922 | 2951 | store_history=store_history, |
|
2923 | 2952 | silent=silent, |
|
2924 | 2953 | shell_futures=shell_futures, |
|
2925 | 2954 | transformed_cell=transformed_cell, |
|
2926 | 2955 | preprocessing_exc_tuple=preprocessing_exc_tuple, |
|
2927 | 2956 | ) |
|
2928 | 2957 | |
|
2929 | 2958 | # run_cell_async is async, but may not actually need an eventloop. |
|
2930 | 2959 | # when this is the case, we want to run it using the pseudo_sync_runner |
|
2931 | 2960 | # so that code can invoke eventloops (for example via the %run , and |
|
2932 | 2961 | # `%paste` magic. |
|
2933 | 2962 | if self.trio_runner: |
|
2934 | 2963 | runner = self.trio_runner |
|
2935 | 2964 | elif self.should_run_async( |
|
2936 | 2965 | raw_cell, |
|
2937 | 2966 | transformed_cell=transformed_cell, |
|
2938 | 2967 | preprocessing_exc_tuple=preprocessing_exc_tuple, |
|
2939 | 2968 | ): |
|
2940 | 2969 | runner = self.loop_runner |
|
2941 | 2970 | else: |
|
2942 | 2971 | runner = _pseudo_sync_runner |
|
2943 | 2972 | |
|
2944 | 2973 | try: |
|
2945 | 2974 | return runner(coro) |
|
2946 | 2975 | except BaseException as e: |
|
2947 | 2976 | info = ExecutionInfo(raw_cell, store_history, silent, shell_futures) |
|
2948 | 2977 | result = ExecutionResult(info) |
|
2949 | 2978 | result.error_in_exec = e |
|
2950 | 2979 | self.showtraceback(running_compiled_code=True) |
|
2951 | 2980 | return result |
|
2952 | 2981 | |
|
2953 | 2982 | def should_run_async( |
|
2954 | 2983 | self, raw_cell: str, *, transformed_cell=None, preprocessing_exc_tuple=None |
|
2955 | 2984 | ) -> bool: |
|
2956 | 2985 | """Return whether a cell should be run asynchronously via a coroutine runner |
|
2957 | 2986 | |
|
2958 | 2987 | Parameters |
|
2959 | 2988 | ---------- |
|
2960 | 2989 | raw_cell: str |
|
2961 | 2990 | The code to be executed |
|
2962 | 2991 | |
|
2963 | 2992 | Returns |
|
2964 | 2993 | ------- |
|
2965 | 2994 | result: bool |
|
2966 | 2995 | Whether the code needs to be run with a coroutine runner or not |
|
2967 | 2996 | |
|
2968 | 2997 | .. versionadded: 7.0 |
|
2969 | 2998 | """ |
|
2970 | 2999 | if not self.autoawait: |
|
2971 | 3000 | return False |
|
2972 | 3001 | if preprocessing_exc_tuple is not None: |
|
2973 | 3002 | return False |
|
2974 | 3003 | assert preprocessing_exc_tuple is None |
|
2975 | 3004 | if transformed_cell is None: |
|
2976 | 3005 | warnings.warn( |
|
2977 | 3006 | "`should_run_async` will not call `transform_cell`" |
|
2978 | 3007 | " automatically in the future. Please pass the result to" |
|
2979 | 3008 | " `transformed_cell` argument and any exception that happen" |
|
2980 | 3009 | " during the" |
|
2981 | 3010 | "transform in `preprocessing_exc_tuple` in" |
|
2982 | 3011 | " IPython 7.17 and above.", |
|
2983 | 3012 | DeprecationWarning, |
|
2984 | 3013 | stacklevel=2, |
|
2985 | 3014 | ) |
|
2986 | 3015 | try: |
|
2987 | 3016 | cell = self.transform_cell(raw_cell) |
|
2988 | 3017 | except Exception: |
|
2989 | 3018 | # any exception during transform will be raised |
|
2990 | 3019 | # prior to execution |
|
2991 | 3020 | return False |
|
2992 | 3021 | else: |
|
2993 | 3022 | cell = transformed_cell |
|
2994 | 3023 | return _should_be_async(cell) |
|
2995 | 3024 | |
|
2996 | 3025 | async def run_cell_async( |
|
2997 | 3026 | self, |
|
2998 | 3027 | raw_cell: str, |
|
2999 | 3028 | store_history=False, |
|
3000 | 3029 | silent=False, |
|
3001 | 3030 | shell_futures=True, |
|
3002 | 3031 | *, |
|
3003 | 3032 | transformed_cell: Optional[str] = None, |
|
3004 | 3033 | preprocessing_exc_tuple: Optional[Any] = None |
|
3005 | 3034 | ) -> ExecutionResult: |
|
3006 | 3035 | """Run a complete IPython cell asynchronously. |
|
3007 | 3036 | |
|
3008 | 3037 | Parameters |
|
3009 | 3038 | ---------- |
|
3010 | 3039 | raw_cell : str |
|
3011 | 3040 | The code (including IPython code such as %magic functions) to run. |
|
3012 | 3041 | store_history : bool |
|
3013 | 3042 | If True, the raw and translated cell will be stored in IPython's |
|
3014 | 3043 | history. For user code calling back into IPython's machinery, this |
|
3015 | 3044 | should be set to False. |
|
3016 | 3045 | silent : bool |
|
3017 | 3046 | If True, avoid side-effects, such as implicit displayhooks and |
|
3018 | 3047 | and logging. silent=True forces store_history=False. |
|
3019 | 3048 | shell_futures : bool |
|
3020 | 3049 | If True, the code will share future statements with the interactive |
|
3021 | 3050 | shell. It will both be affected by previous __future__ imports, and |
|
3022 | 3051 | any __future__ imports in the code will affect the shell. If False, |
|
3023 | 3052 | __future__ imports are not shared in either direction. |
|
3024 | 3053 | transformed_cell: str |
|
3025 | 3054 | cell that was passed through transformers |
|
3026 | 3055 | preprocessing_exc_tuple: |
|
3027 | 3056 | trace if the transformation failed. |
|
3028 | 3057 | |
|
3029 | 3058 | Returns |
|
3030 | 3059 | ------- |
|
3031 | 3060 | result : :class:`ExecutionResult` |
|
3032 | 3061 | |
|
3033 | 3062 | .. versionadded: 7.0 |
|
3034 | 3063 | """ |
|
3035 | 3064 | info = ExecutionInfo( |
|
3036 | 3065 | raw_cell, store_history, silent, shell_futures) |
|
3037 | 3066 | result = ExecutionResult(info) |
|
3038 | 3067 | |
|
3039 | 3068 | if (not raw_cell) or raw_cell.isspace(): |
|
3040 | 3069 | self.last_execution_succeeded = True |
|
3041 | 3070 | self.last_execution_result = result |
|
3042 | 3071 | return result |
|
3043 | 3072 | |
|
3044 | 3073 | if silent: |
|
3045 | 3074 | store_history = False |
|
3046 | 3075 | |
|
3047 | 3076 | if store_history: |
|
3048 | 3077 | result.execution_count = self.execution_count |
|
3049 | 3078 | |
|
3050 | 3079 | def error_before_exec(value): |
|
3051 | 3080 | if store_history: |
|
3052 | 3081 | self.execution_count += 1 |
|
3053 | 3082 | result.error_before_exec = value |
|
3054 | 3083 | self.last_execution_succeeded = False |
|
3055 | 3084 | self.last_execution_result = result |
|
3056 | 3085 | return result |
|
3057 | 3086 | |
|
3058 | 3087 | self.events.trigger('pre_execute') |
|
3059 | 3088 | if not silent: |
|
3060 | 3089 | self.events.trigger('pre_run_cell', info) |
|
3061 | 3090 | |
|
3062 | 3091 | if transformed_cell is None: |
|
3063 | 3092 | warnings.warn( |
|
3064 | 3093 | "`run_cell_async` will not call `transform_cell`" |
|
3065 | 3094 | " automatically in the future. Please pass the result to" |
|
3066 | 3095 | " `transformed_cell` argument and any exception that happen" |
|
3067 | 3096 | " during the" |
|
3068 | 3097 | "transform in `preprocessing_exc_tuple` in" |
|
3069 | 3098 | " IPython 7.17 and above.", |
|
3070 | 3099 | DeprecationWarning, |
|
3071 | 3100 | stacklevel=2, |
|
3072 | 3101 | ) |
|
3073 | 3102 | # If any of our input transformation (input_transformer_manager or |
|
3074 | 3103 | # prefilter_manager) raises an exception, we store it in this variable |
|
3075 | 3104 | # so that we can display the error after logging the input and storing |
|
3076 | 3105 | # it in the history. |
|
3077 | 3106 | try: |
|
3078 | 3107 | cell = self.transform_cell(raw_cell) |
|
3079 | 3108 | except Exception: |
|
3080 | 3109 | preprocessing_exc_tuple = sys.exc_info() |
|
3081 | 3110 | cell = raw_cell # cell has to exist so it can be stored/logged |
|
3082 | 3111 | else: |
|
3083 | 3112 | preprocessing_exc_tuple = None |
|
3084 | 3113 | else: |
|
3085 | 3114 | if preprocessing_exc_tuple is None: |
|
3086 | 3115 | cell = transformed_cell |
|
3087 | 3116 | else: |
|
3088 | 3117 | cell = raw_cell |
|
3089 | 3118 | |
|
3090 | 3119 | # Store raw and processed history |
|
3091 | 3120 | if store_history: |
|
3092 | 3121 | self.history_manager.store_inputs(self.execution_count, |
|
3093 | 3122 | cell, raw_cell) |
|
3094 | 3123 | if not silent: |
|
3095 | 3124 | self.logger.log(cell, raw_cell) |
|
3096 | 3125 | |
|
3097 | 3126 | # Display the exception if input processing failed. |
|
3098 | 3127 | if preprocessing_exc_tuple is not None: |
|
3099 | 3128 | self.showtraceback(preprocessing_exc_tuple) |
|
3100 | 3129 | if store_history: |
|
3101 | 3130 | self.execution_count += 1 |
|
3102 | 3131 | return error_before_exec(preprocessing_exc_tuple[1]) |
|
3103 | 3132 | |
|
3104 | 3133 | # Our own compiler remembers the __future__ environment. If we want to |
|
3105 | 3134 | # run code with a separate __future__ environment, use the default |
|
3106 | 3135 | # compiler |
|
3107 | 3136 | compiler = self.compile if shell_futures else self.compiler_class() |
|
3108 | 3137 | |
|
3109 | 3138 | _run_async = False |
|
3110 | 3139 | |
|
3111 | 3140 | with self.builtin_trap: |
|
3112 |
cell_name = |
|
|
3113 | cell, self.execution_count, raw_code=raw_cell | |
|
3114 | ) | |
|
3141 | cell_name = compiler.cache(cell, self.execution_count, raw_code=raw_cell) | |
|
3115 | 3142 | |
|
3116 | 3143 | with self.display_trap: |
|
3117 | 3144 | # Compile to bytecode |
|
3118 | 3145 | try: |
|
3119 | 3146 | if sys.version_info < (3,8) and self.autoawait: |
|
3120 | 3147 | if _should_be_async(cell): |
|
3121 | 3148 | # the code AST below will not be user code: we wrap it |
|
3122 | 3149 | # in an `async def`. This will likely make some AST |
|
3123 | 3150 | # transformer below miss some transform opportunity and |
|
3124 | 3151 | # introduce a small coupling to run_code (in which we |
|
3125 | 3152 | # bake some assumptions of what _ast_asyncify returns. |
|
3126 | 3153 | # they are ways around (like grafting part of the ast |
|
3127 | 3154 | # later: |
|
3128 | 3155 | # - Here, return code_ast.body[0].body[1:-1], as well |
|
3129 | 3156 | # as last expression in return statement which is |
|
3130 | 3157 | # the user code part. |
|
3131 | 3158 | # - Let it go through the AST transformers, and graft |
|
3132 | 3159 | # - it back after the AST transform |
|
3133 | 3160 | # But that seem unreasonable, at least while we |
|
3134 | 3161 | # do not need it. |
|
3135 | 3162 | code_ast = _ast_asyncify(cell, 'async-def-wrapper') |
|
3136 | 3163 | _run_async = True |
|
3137 | 3164 | else: |
|
3138 | 3165 | code_ast = compiler.ast_parse(cell, filename=cell_name) |
|
3139 | 3166 | else: |
|
3140 | 3167 | code_ast = compiler.ast_parse(cell, filename=cell_name) |
|
3141 | 3168 | except self.custom_exceptions as e: |
|
3142 | 3169 | etype, value, tb = sys.exc_info() |
|
3143 | 3170 | self.CustomTB(etype, value, tb) |
|
3144 | 3171 | return error_before_exec(e) |
|
3145 | 3172 | except IndentationError as e: |
|
3146 | 3173 | self.showindentationerror() |
|
3147 | 3174 | return error_before_exec(e) |
|
3148 | 3175 | except (OverflowError, SyntaxError, ValueError, TypeError, |
|
3149 | 3176 | MemoryError) as e: |
|
3150 | 3177 | self.showsyntaxerror() |
|
3151 | 3178 | return error_before_exec(e) |
|
3152 | 3179 | |
|
3153 | 3180 | # Apply AST transformations |
|
3154 | 3181 | try: |
|
3155 | 3182 | code_ast = self.transform_ast(code_ast) |
|
3156 | 3183 | except InputRejected as e: |
|
3157 | 3184 | self.showtraceback() |
|
3158 | 3185 | return error_before_exec(e) |
|
3159 | 3186 | |
|
3160 | 3187 | # Give the displayhook a reference to our ExecutionResult so it |
|
3161 | 3188 | # can fill in the output value. |
|
3162 | 3189 | self.displayhook.exec_result = result |
|
3163 | 3190 | |
|
3164 | 3191 | # Execute the user code |
|
3165 | 3192 | interactivity = "none" if silent else self.ast_node_interactivity |
|
3166 | 3193 | if _run_async: |
|
3167 | 3194 | interactivity = 'async' |
|
3168 | 3195 | |
|
3169 | 3196 | has_raised = await self.run_ast_nodes(code_ast.body, cell_name, |
|
3170 | 3197 | interactivity=interactivity, compiler=compiler, result=result) |
|
3171 | 3198 | |
|
3172 | 3199 | self.last_execution_succeeded = not has_raised |
|
3173 | 3200 | self.last_execution_result = result |
|
3174 | 3201 | |
|
3175 | 3202 | # Reset this so later displayed values do not modify the |
|
3176 | 3203 | # ExecutionResult |
|
3177 | 3204 | self.displayhook.exec_result = None |
|
3178 | 3205 | |
|
3179 | 3206 | if store_history: |
|
3180 | 3207 | # Write output to the database. Does nothing unless |
|
3181 | 3208 | # history output logging is enabled. |
|
3182 | 3209 | self.history_manager.store_output(self.execution_count) |
|
3183 | 3210 | # Each cell is a *single* input, regardless of how many lines it has |
|
3184 | 3211 | self.execution_count += 1 |
|
3185 | 3212 | |
|
3186 | 3213 | return result |
|
3187 | 3214 | |
|
3188 | 3215 | def transform_cell(self, raw_cell): |
|
3189 | 3216 | """Transform an input cell before parsing it. |
|
3190 | 3217 | |
|
3191 | 3218 | Static transformations, implemented in IPython.core.inputtransformer2, |
|
3192 | 3219 | deal with things like ``%magic`` and ``!system`` commands. |
|
3193 | 3220 | These run on all input. |
|
3194 | 3221 | Dynamic transformations, for things like unescaped magics and the exit |
|
3195 | 3222 | autocall, depend on the state of the interpreter. |
|
3196 | 3223 | These only apply to single line inputs. |
|
3197 | 3224 | |
|
3198 | 3225 | These string-based transformations are followed by AST transformations; |
|
3199 | 3226 | see :meth:`transform_ast`. |
|
3200 | 3227 | """ |
|
3201 | 3228 | # Static input transformations |
|
3202 | 3229 | cell = self.input_transformer_manager.transform_cell(raw_cell) |
|
3203 | 3230 | |
|
3204 | 3231 | if len(cell.splitlines()) == 1: |
|
3205 | 3232 | # Dynamic transformations - only applied for single line commands |
|
3206 | 3233 | with self.builtin_trap: |
|
3207 | 3234 | # use prefilter_lines to handle trailing newlines |
|
3208 | 3235 | # restore trailing newline for ast.parse |
|
3209 | 3236 | cell = self.prefilter_manager.prefilter_lines(cell) + '\n' |
|
3210 | 3237 | |
|
3211 | 3238 | lines = cell.splitlines(keepends=True) |
|
3212 | 3239 | for transform in self.input_transformers_post: |
|
3213 | 3240 | lines = transform(lines) |
|
3214 | 3241 | cell = ''.join(lines) |
|
3215 | 3242 | |
|
3216 | 3243 | return cell |
|
3217 | 3244 | |
|
3218 | 3245 | def transform_ast(self, node): |
|
3219 | 3246 | """Apply the AST transformations from self.ast_transformers |
|
3220 | 3247 | |
|
3221 | 3248 | Parameters |
|
3222 | 3249 | ---------- |
|
3223 | 3250 | node : ast.Node |
|
3224 | 3251 | The root node to be transformed. Typically called with the ast.Module |
|
3225 | 3252 | produced by parsing user input. |
|
3226 | 3253 | |
|
3227 | 3254 | Returns |
|
3228 | 3255 | ------- |
|
3229 | 3256 | An ast.Node corresponding to the node it was called with. Note that it |
|
3230 | 3257 | may also modify the passed object, so don't rely on references to the |
|
3231 | 3258 | original AST. |
|
3232 | 3259 | """ |
|
3233 | 3260 | for transformer in self.ast_transformers: |
|
3234 | 3261 | try: |
|
3235 | 3262 | node = transformer.visit(node) |
|
3236 | 3263 | except InputRejected: |
|
3237 | 3264 | # User-supplied AST transformers can reject an input by raising |
|
3238 | 3265 | # an InputRejected. Short-circuit in this case so that we |
|
3239 | 3266 | # don't unregister the transform. |
|
3240 | 3267 | raise |
|
3241 | 3268 | except Exception: |
|
3242 | 3269 | warn("AST transformer %r threw an error. It will be unregistered." % transformer) |
|
3243 | 3270 | self.ast_transformers.remove(transformer) |
|
3244 | 3271 | |
|
3245 | 3272 | if self.ast_transformers: |
|
3246 | 3273 | ast.fix_missing_locations(node) |
|
3247 | 3274 | return node |
|
3248 | 3275 | |
|
3249 | 3276 | async def run_ast_nodes(self, nodelist:ListType[AST], cell_name:str, interactivity='last_expr', |
|
3250 | 3277 | compiler=compile, result=None): |
|
3251 | 3278 | """Run a sequence of AST nodes. The execution mode depends on the |
|
3252 | 3279 | interactivity parameter. |
|
3253 | 3280 | |
|
3254 | 3281 | Parameters |
|
3255 | 3282 | ---------- |
|
3256 | 3283 | nodelist : list |
|
3257 | 3284 | A sequence of AST nodes to run. |
|
3258 | 3285 | cell_name : str |
|
3259 | 3286 | Will be passed to the compiler as the filename of the cell. Typically |
|
3260 | 3287 | the value returned by ip.compile.cache(cell). |
|
3261 | 3288 | interactivity : str |
|
3262 | 3289 | 'all', 'last', 'last_expr' , 'last_expr_or_assign' or 'none', |
|
3263 | 3290 | specifying which nodes should be run interactively (displaying output |
|
3264 | 3291 | from expressions). 'last_expr' will run the last node interactively |
|
3265 | 3292 | only if it is an expression (i.e. expressions in loops or other blocks |
|
3266 | 3293 | are not displayed) 'last_expr_or_assign' will run the last expression |
|
3267 | 3294 | or the last assignment. Other values for this parameter will raise a |
|
3268 | 3295 | ValueError. |
|
3269 | 3296 | |
|
3270 | 3297 | Experimental value: 'async' Will try to run top level interactive |
|
3271 | 3298 | async/await code in default runner, this will not respect the |
|
3272 | 3299 | interactivity setting and will only run the last node if it is an |
|
3273 | 3300 | expression. |
|
3274 | 3301 | |
|
3275 | 3302 | compiler : callable |
|
3276 | 3303 | A function with the same interface as the built-in compile(), to turn |
|
3277 | 3304 | the AST nodes into code objects. Default is the built-in compile(). |
|
3278 | 3305 | result : ExecutionResult, optional |
|
3279 | 3306 | An object to store exceptions that occur during execution. |
|
3280 | 3307 | |
|
3281 | 3308 | Returns |
|
3282 | 3309 | ------- |
|
3283 | 3310 | True if an exception occurred while running code, False if it finished |
|
3284 | 3311 | running. |
|
3285 | 3312 | """ |
|
3286 | 3313 | if not nodelist: |
|
3287 | 3314 | return |
|
3288 | 3315 | |
|
3289 | 3316 | if interactivity == 'last_expr_or_assign': |
|
3290 | 3317 | if isinstance(nodelist[-1], _assign_nodes): |
|
3291 | 3318 | asg = nodelist[-1] |
|
3292 | 3319 | if isinstance(asg, ast.Assign) and len(asg.targets) == 1: |
|
3293 | 3320 | target = asg.targets[0] |
|
3294 | 3321 | elif isinstance(asg, _single_targets_nodes): |
|
3295 | 3322 | target = asg.target |
|
3296 | 3323 | else: |
|
3297 | 3324 | target = None |
|
3298 | 3325 | if isinstance(target, ast.Name): |
|
3299 | 3326 | nnode = ast.Expr(ast.Name(target.id, ast.Load())) |
|
3300 | 3327 | ast.fix_missing_locations(nnode) |
|
3301 | 3328 | nodelist.append(nnode) |
|
3302 | 3329 | interactivity = 'last_expr' |
|
3303 | 3330 | |
|
3304 | 3331 | _async = False |
|
3305 | 3332 | if interactivity == 'last_expr': |
|
3306 | 3333 | if isinstance(nodelist[-1], ast.Expr): |
|
3307 | 3334 | interactivity = "last" |
|
3308 | 3335 | else: |
|
3309 | 3336 | interactivity = "none" |
|
3310 | 3337 | |
|
3311 | 3338 | if interactivity == 'none': |
|
3312 | 3339 | to_run_exec, to_run_interactive = nodelist, [] |
|
3313 | 3340 | elif interactivity == 'last': |
|
3314 | 3341 | to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:] |
|
3315 | 3342 | elif interactivity == 'all': |
|
3316 | 3343 | to_run_exec, to_run_interactive = [], nodelist |
|
3317 | 3344 | elif interactivity == 'async': |
|
3318 | 3345 | to_run_exec, to_run_interactive = [], nodelist |
|
3319 | 3346 | _async = True |
|
3320 | 3347 | else: |
|
3321 | 3348 | raise ValueError("Interactivity was %r" % interactivity) |
|
3322 | 3349 | |
|
3323 | 3350 | try: |
|
3324 | 3351 | if _async and sys.version_info > (3,8): |
|
3325 | 3352 | raise ValueError("This branch should never happen on Python 3.8 and above, " |
|
3326 | 3353 | "please try to upgrade IPython and open a bug report with your case.") |
|
3327 | 3354 | if _async: |
|
3328 | 3355 | # If interactivity is async the semantics of run_code are |
|
3329 | 3356 | # completely different Skip usual machinery. |
|
3330 | 3357 | mod = Module(nodelist, []) |
|
3331 | 3358 | async_wrapper_code = compiler(mod, cell_name, 'exec') |
|
3332 | 3359 | exec(async_wrapper_code, self.user_global_ns, self.user_ns) |
|
3333 | 3360 | async_code = removed_co_newlocals(self.user_ns.pop('async-def-wrapper')).__code__ |
|
3334 | 3361 | if (await self.run_code(async_code, result, async_=True)): |
|
3335 | 3362 | return True |
|
3336 | 3363 | else: |
|
3337 | 3364 | if sys.version_info > (3, 8): |
|
3338 | 3365 | def compare(code): |
|
3339 | 3366 | is_async = (inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE) |
|
3340 | 3367 | return is_async |
|
3341 | 3368 | else: |
|
3342 | 3369 | def compare(code): |
|
3343 | 3370 | return _async |
|
3344 | 3371 | |
|
3345 | 3372 | # refactor that to just change the mod constructor. |
|
3346 | 3373 | to_run = [] |
|
3347 | 3374 | for node in to_run_exec: |
|
3348 | 3375 | to_run.append((node, 'exec')) |
|
3349 | 3376 | |
|
3350 | 3377 | for node in to_run_interactive: |
|
3351 | 3378 | to_run.append((node, 'single')) |
|
3352 | 3379 | |
|
3353 | 3380 | for node,mode in to_run: |
|
3354 | 3381 | if mode == 'exec': |
|
3355 | 3382 | mod = Module([node], []) |
|
3356 | 3383 | elif mode == 'single': |
|
3357 | 3384 | mod = ast.Interactive([node]) |
|
3358 | 3385 | with compiler.extra_flags(getattr(ast, 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0) if self.autoawait else 0x0): |
|
3359 | 3386 | code = compiler(mod, cell_name, mode) |
|
3360 | 3387 | asy = compare(code) |
|
3361 | 3388 | if (await self.run_code(code, result, async_=asy)): |
|
3362 | 3389 | return True |
|
3363 | 3390 | |
|
3364 | 3391 | # Flush softspace |
|
3365 | 3392 | if softspace(sys.stdout, 0): |
|
3366 | 3393 | print() |
|
3367 | 3394 | |
|
3368 | 3395 | except: |
|
3369 | 3396 | # It's possible to have exceptions raised here, typically by |
|
3370 | 3397 | # compilation of odd code (such as a naked 'return' outside a |
|
3371 | 3398 | # function) that did parse but isn't valid. Typically the exception |
|
3372 | 3399 | # is a SyntaxError, but it's safest just to catch anything and show |
|
3373 | 3400 | # the user a traceback. |
|
3374 | 3401 | |
|
3375 | 3402 | # We do only one try/except outside the loop to minimize the impact |
|
3376 | 3403 | # on runtime, and also because if any node in the node list is |
|
3377 | 3404 | # broken, we should stop execution completely. |
|
3378 | 3405 | if result: |
|
3379 | 3406 | result.error_before_exec = sys.exc_info()[1] |
|
3380 | 3407 | self.showtraceback() |
|
3381 | 3408 | return True |
|
3382 | 3409 | |
|
3383 | 3410 | return False |
|
3384 | 3411 | |
|
3385 | 3412 | def _async_exec(self, code_obj: types.CodeType, user_ns: dict): |
|
3386 | 3413 | """ |
|
3387 | 3414 | Evaluate an asynchronous code object using a code runner |
|
3388 | 3415 | |
|
3389 | 3416 | Fake asynchronous execution of code_object in a namespace via a proxy namespace. |
|
3390 | 3417 | |
|
3391 | 3418 | Returns coroutine object, which can be executed via async loop runner |
|
3392 | 3419 | |
|
3393 | 3420 | WARNING: The semantics of `async_exec` are quite different from `exec`, |
|
3394 | 3421 | in particular you can only pass a single namespace. It also return a |
|
3395 | 3422 | handle to the value of the last things returned by code_object. |
|
3396 | 3423 | """ |
|
3397 | 3424 | |
|
3398 | 3425 | return eval(code_obj, user_ns) |
|
3399 | 3426 | |
|
3400 | 3427 | async def run_code(self, code_obj, result=None, *, async_=False): |
|
3401 | 3428 | """Execute a code object. |
|
3402 | 3429 | |
|
3403 | 3430 | When an exception occurs, self.showtraceback() is called to display a |
|
3404 | 3431 | traceback. |
|
3405 | 3432 | |
|
3406 | 3433 | Parameters |
|
3407 | 3434 | ---------- |
|
3408 | 3435 | code_obj : code object |
|
3409 | 3436 | A compiled code object, to be executed |
|
3410 | 3437 | result : ExecutionResult, optional |
|
3411 | 3438 | An object to store exceptions that occur during execution. |
|
3412 | 3439 | async_ : Bool (Experimental) |
|
3413 | 3440 | Attempt to run top-level asynchronous code in a default loop. |
|
3414 | 3441 | |
|
3415 | 3442 | Returns |
|
3416 | 3443 | ------- |
|
3417 | 3444 | False : successful execution. |
|
3418 | 3445 | True : an error occurred. |
|
3419 | 3446 | """ |
|
3420 | 3447 | # special value to say that anything above is IPython and should be |
|
3421 | 3448 | # hidden. |
|
3422 | 3449 | __tracebackhide__ = "__ipython_bottom__" |
|
3423 | 3450 | # Set our own excepthook in case the user code tries to call it |
|
3424 | 3451 | # directly, so that the IPython crash handler doesn't get triggered |
|
3425 | 3452 | old_excepthook, sys.excepthook = sys.excepthook, self.excepthook |
|
3426 | 3453 | |
|
3427 | 3454 | # we save the original sys.excepthook in the instance, in case config |
|
3428 | 3455 | # code (such as magics) needs access to it. |
|
3429 | 3456 | self.sys_excepthook = old_excepthook |
|
3430 | 3457 | outflag = True # happens in more places, so it's easier as default |
|
3431 | 3458 | try: |
|
3432 | 3459 | try: |
|
3433 | 3460 | self.hooks.pre_run_code_hook() |
|
3434 | 3461 | if async_ and sys.version_info < (3,8): |
|
3435 | 3462 | last_expr = (await self._async_exec(code_obj, self.user_ns)) |
|
3436 | 3463 | code = compile('last_expr', 'fake', "single") |
|
3437 | 3464 | exec(code, {'last_expr': last_expr}) |
|
3438 | 3465 | elif async_ : |
|
3439 | 3466 | await eval(code_obj, self.user_global_ns, self.user_ns) |
|
3440 | 3467 | else: |
|
3441 | 3468 | exec(code_obj, self.user_global_ns, self.user_ns) |
|
3442 | 3469 | finally: |
|
3443 | 3470 | # Reset our crash handler in place |
|
3444 | 3471 | sys.excepthook = old_excepthook |
|
3445 | 3472 | except SystemExit as e: |
|
3446 | 3473 | if result is not None: |
|
3447 | 3474 | result.error_in_exec = e |
|
3448 | 3475 | self.showtraceback(exception_only=True) |
|
3449 | 3476 | warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1) |
|
3450 | 3477 | except self.custom_exceptions: |
|
3451 | 3478 | etype, value, tb = sys.exc_info() |
|
3452 | 3479 | if result is not None: |
|
3453 | 3480 | result.error_in_exec = value |
|
3454 | 3481 | self.CustomTB(etype, value, tb) |
|
3455 | 3482 | except: |
|
3456 | 3483 | if result is not None: |
|
3457 | 3484 | result.error_in_exec = sys.exc_info()[1] |
|
3458 | 3485 | self.showtraceback(running_compiled_code=True) |
|
3459 | 3486 | else: |
|
3460 | 3487 | outflag = False |
|
3461 | 3488 | return outflag |
|
3462 | 3489 | |
|
3463 | 3490 | # For backwards compatibility |
|
3464 | 3491 | runcode = run_code |
|
3465 | 3492 | |
|
3466 | 3493 | def check_complete(self, code: str) -> Tuple[str, str]: |
|
3467 | 3494 | """Return whether a block of code is ready to execute, or should be continued |
|
3468 | 3495 | |
|
3469 | 3496 | Parameters |
|
3470 | 3497 | ---------- |
|
3471 | 3498 | source : string |
|
3472 | 3499 | Python input code, which can be multiline. |
|
3473 | 3500 | |
|
3474 | 3501 | Returns |
|
3475 | 3502 | ------- |
|
3476 | 3503 | status : str |
|
3477 | 3504 | One of 'complete', 'incomplete', or 'invalid' if source is not a |
|
3478 | 3505 | prefix of valid code. |
|
3479 | 3506 | indent : str |
|
3480 | 3507 | When status is 'incomplete', this is some whitespace to insert on |
|
3481 | 3508 | the next line of the prompt. |
|
3482 | 3509 | """ |
|
3483 | 3510 | status, nspaces = self.input_transformer_manager.check_complete(code) |
|
3484 | 3511 | return status, ' ' * (nspaces or 0) |
|
3485 | 3512 | |
|
3486 | 3513 | #------------------------------------------------------------------------- |
|
3487 | 3514 | # Things related to GUI support and pylab |
|
3488 | 3515 | #------------------------------------------------------------------------- |
|
3489 | 3516 | |
|
3490 | 3517 | active_eventloop = None |
|
3491 | 3518 | |
|
3492 | 3519 | def enable_gui(self, gui=None): |
|
3493 | 3520 | raise NotImplementedError('Implement enable_gui in a subclass') |
|
3494 | 3521 | |
|
3495 | 3522 | def enable_matplotlib(self, gui=None): |
|
3496 | 3523 | """Enable interactive matplotlib and inline figure support. |
|
3497 | 3524 | |
|
3498 | 3525 | This takes the following steps: |
|
3499 | 3526 | |
|
3500 | 3527 | 1. select the appropriate eventloop and matplotlib backend |
|
3501 | 3528 | 2. set up matplotlib for interactive use with that backend |
|
3502 | 3529 | 3. configure formatters for inline figure display |
|
3503 | 3530 | 4. enable the selected gui eventloop |
|
3504 | 3531 | |
|
3505 | 3532 | Parameters |
|
3506 | 3533 | ---------- |
|
3507 | 3534 | gui : optional, string |
|
3508 | 3535 | If given, dictates the choice of matplotlib GUI backend to use |
|
3509 | 3536 | (should be one of IPython's supported backends, 'qt', 'osx', 'tk', |
|
3510 | 3537 | 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by |
|
3511 | 3538 | matplotlib (as dictated by the matplotlib build-time options plus the |
|
3512 | 3539 | user's matplotlibrc configuration file). Note that not all backends |
|
3513 | 3540 | make sense in all contexts, for example a terminal ipython can't |
|
3514 | 3541 | display figures inline. |
|
3515 | 3542 | """ |
|
3516 | 3543 | from IPython.core import pylabtools as pt |
|
3544 | from matplotlib_inline.backend_inline import configure_inline_support | |
|
3517 | 3545 | gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select) |
|
3518 | 3546 | |
|
3519 | 3547 | if gui != 'inline': |
|
3520 | 3548 | # If we have our first gui selection, store it |
|
3521 | 3549 | if self.pylab_gui_select is None: |
|
3522 | 3550 | self.pylab_gui_select = gui |
|
3523 | 3551 | # Otherwise if they are different |
|
3524 | 3552 | elif gui != self.pylab_gui_select: |
|
3525 | 3553 | print('Warning: Cannot change to a different GUI toolkit: %s.' |
|
3526 | 3554 | ' Using %s instead.' % (gui, self.pylab_gui_select)) |
|
3527 | 3555 | gui, backend = pt.find_gui_and_backend(self.pylab_gui_select) |
|
3528 | 3556 | |
|
3529 | 3557 | pt.activate_matplotlib(backend) |
|
3530 |
|
|
|
3558 | configure_inline_support(self, backend) | |
|
3531 | 3559 | |
|
3532 | 3560 | # Now we must activate the gui pylab wants to use, and fix %run to take |
|
3533 | 3561 | # plot updates into account |
|
3534 | 3562 | self.enable_gui(gui) |
|
3535 | 3563 | self.magics_manager.registry['ExecutionMagics'].default_runner = \ |
|
3536 | 3564 | pt.mpl_runner(self.safe_execfile) |
|
3537 | 3565 | |
|
3538 | 3566 | return gui, backend |
|
3539 | 3567 | |
|
3540 | 3568 | def enable_pylab(self, gui=None, import_all=True, welcome_message=False): |
|
3541 | 3569 | """Activate pylab support at runtime. |
|
3542 | 3570 | |
|
3543 | 3571 | This turns on support for matplotlib, preloads into the interactive |
|
3544 | 3572 | namespace all of numpy and pylab, and configures IPython to correctly |
|
3545 | 3573 | interact with the GUI event loop. The GUI backend to be used can be |
|
3546 | 3574 | optionally selected with the optional ``gui`` argument. |
|
3547 | 3575 | |
|
3548 | 3576 | This method only adds preloading the namespace to InteractiveShell.enable_matplotlib. |
|
3549 | 3577 | |
|
3550 | 3578 | Parameters |
|
3551 | 3579 | ---------- |
|
3552 | 3580 | gui : optional, string |
|
3553 | 3581 | If given, dictates the choice of matplotlib GUI backend to use |
|
3554 | 3582 | (should be one of IPython's supported backends, 'qt', 'osx', 'tk', |
|
3555 | 3583 | 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by |
|
3556 | 3584 | matplotlib (as dictated by the matplotlib build-time options plus the |
|
3557 | 3585 | user's matplotlibrc configuration file). Note that not all backends |
|
3558 | 3586 | make sense in all contexts, for example a terminal ipython can't |
|
3559 | 3587 | display figures inline. |
|
3560 | 3588 | import_all : optional, bool, default: True |
|
3561 | 3589 | Whether to do `from numpy import *` and `from pylab import *` |
|
3562 | 3590 | in addition to module imports. |
|
3563 | 3591 | welcome_message : deprecated |
|
3564 | 3592 | This argument is ignored, no welcome message will be displayed. |
|
3565 | 3593 | """ |
|
3566 | 3594 | from IPython.core.pylabtools import import_pylab |
|
3567 | 3595 | |
|
3568 | 3596 | gui, backend = self.enable_matplotlib(gui) |
|
3569 | 3597 | |
|
3570 | 3598 | # We want to prevent the loading of pylab to pollute the user's |
|
3571 | 3599 | # namespace as shown by the %who* magics, so we execute the activation |
|
3572 | 3600 | # code in an empty namespace, and we update *both* user_ns and |
|
3573 | 3601 | # user_ns_hidden with this information. |
|
3574 | 3602 | ns = {} |
|
3575 | 3603 | import_pylab(ns, import_all) |
|
3576 | 3604 | # warn about clobbered names |
|
3577 | 3605 | ignored = {"__builtins__"} |
|
3578 | 3606 | both = set(ns).intersection(self.user_ns).difference(ignored) |
|
3579 | 3607 | clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ] |
|
3580 | 3608 | self.user_ns.update(ns) |
|
3581 | 3609 | self.user_ns_hidden.update(ns) |
|
3582 | 3610 | return gui, backend, clobbered |
|
3583 | 3611 | |
|
3584 | 3612 | #------------------------------------------------------------------------- |
|
3585 | 3613 | # Utilities |
|
3586 | 3614 | #------------------------------------------------------------------------- |
|
3587 | 3615 | |
|
3588 | 3616 | def var_expand(self, cmd, depth=0, formatter=DollarFormatter()): |
|
3589 | 3617 | """Expand python variables in a string. |
|
3590 | 3618 | |
|
3591 | 3619 | The depth argument indicates how many frames above the caller should |
|
3592 | 3620 | be walked to look for the local namespace where to expand variables. |
|
3593 | 3621 | |
|
3594 | 3622 | The global namespace for expansion is always the user's interactive |
|
3595 | 3623 | namespace. |
|
3596 | 3624 | """ |
|
3597 | 3625 | ns = self.user_ns.copy() |
|
3598 | 3626 | try: |
|
3599 | 3627 | frame = sys._getframe(depth+1) |
|
3600 | 3628 | except ValueError: |
|
3601 | 3629 | # This is thrown if there aren't that many frames on the stack, |
|
3602 | 3630 | # e.g. if a script called run_line_magic() directly. |
|
3603 | 3631 | pass |
|
3604 | 3632 | else: |
|
3605 | 3633 | ns.update(frame.f_locals) |
|
3606 | 3634 | |
|
3607 | 3635 | try: |
|
3608 | 3636 | # We have to use .vformat() here, because 'self' is a valid and common |
|
3609 | 3637 | # name, and expanding **ns for .format() would make it collide with |
|
3610 | 3638 | # the 'self' argument of the method. |
|
3611 | 3639 | cmd = formatter.vformat(cmd, args=[], kwargs=ns) |
|
3612 | 3640 | except Exception: |
|
3613 | 3641 | # if formatter couldn't format, just let it go untransformed |
|
3614 | 3642 | pass |
|
3615 | 3643 | return cmd |
|
3616 | 3644 | |
|
3617 | 3645 | def mktempfile(self, data=None, prefix='ipython_edit_'): |
|
3618 | 3646 | """Make a new tempfile and return its filename. |
|
3619 | 3647 | |
|
3620 | 3648 | This makes a call to tempfile.mkstemp (created in a tempfile.mkdtemp), |
|
3621 | 3649 | but it registers the created filename internally so ipython cleans it up |
|
3622 | 3650 | at exit time. |
|
3623 | 3651 | |
|
3624 | 3652 | Optional inputs: |
|
3625 | 3653 | |
|
3626 | 3654 | - data(None): if data is given, it gets written out to the temp file |
|
3627 | 3655 | immediately, and the file is closed again.""" |
|
3628 | 3656 | |
|
3629 | 3657 | dir_path = Path(tempfile.mkdtemp(prefix=prefix)) |
|
3630 | 3658 | self.tempdirs.append(dir_path) |
|
3631 | 3659 | |
|
3632 | 3660 | handle, filename = tempfile.mkstemp(".py", prefix, dir=str(dir_path)) |
|
3633 | 3661 | os.close(handle) # On Windows, there can only be one open handle on a file |
|
3634 | 3662 | |
|
3635 | 3663 | file_path = Path(filename) |
|
3636 | 3664 | self.tempfiles.append(file_path) |
|
3637 | 3665 | |
|
3638 | 3666 | if data: |
|
3639 | 3667 | file_path.write_text(data) |
|
3640 | 3668 | return filename |
|
3641 | 3669 | |
|
3642 | 3670 | @undoc |
|
3643 | 3671 | def write(self,data): |
|
3644 | 3672 | """DEPRECATED: Write a string to the default output""" |
|
3645 | 3673 | warn('InteractiveShell.write() is deprecated, use sys.stdout instead', |
|
3646 | 3674 | DeprecationWarning, stacklevel=2) |
|
3647 | 3675 | sys.stdout.write(data) |
|
3648 | 3676 | |
|
3649 | 3677 | @undoc |
|
3650 | 3678 | def write_err(self,data): |
|
3651 | 3679 | """DEPRECATED: Write a string to the default error output""" |
|
3652 | 3680 | warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead', |
|
3653 | 3681 | DeprecationWarning, stacklevel=2) |
|
3654 | 3682 | sys.stderr.write(data) |
|
3655 | 3683 | |
|
3656 | 3684 | def ask_yes_no(self, prompt, default=None, interrupt=None): |
|
3657 | 3685 | if self.quiet: |
|
3658 | 3686 | return True |
|
3659 | 3687 | return ask_yes_no(prompt,default,interrupt) |
|
3660 | 3688 | |
|
3661 | 3689 | def show_usage(self): |
|
3662 | 3690 | """Show a usage message""" |
|
3663 | 3691 | page.page(IPython.core.usage.interactive_usage) |
|
3664 | 3692 | |
|
3665 | 3693 | def extract_input_lines(self, range_str, raw=False): |
|
3666 | 3694 | """Return as a string a set of input history slices. |
|
3667 | 3695 | |
|
3668 | 3696 | Parameters |
|
3669 | 3697 | ---------- |
|
3670 |
range_str : str |
|
|
3698 | range_str : str | |
|
3671 | 3699 | The set of slices is given as a string, like "~5/6-~4/2 4:8 9", |
|
3672 | 3700 | since this function is for use by magic functions which get their |
|
3673 | 3701 | arguments as strings. The number before the / is the session |
|
3674 | 3702 | number: ~n goes n back from the current session. |
|
3675 | 3703 | |
|
3704 | If empty string is given, returns history of current session | |
|
3705 | without the last input. | |
|
3706 | ||
|
3676 | 3707 | raw : bool, optional |
|
3677 | 3708 | By default, the processed input is used. If this is true, the raw |
|
3678 | 3709 | input history is used instead. |
|
3679 | 3710 | |
|
3680 | 3711 | Notes |
|
3681 | 3712 | ----- |
|
3682 | 3713 | |
|
3683 | 3714 | Slices can be described with two notations: |
|
3684 | 3715 | |
|
3685 | 3716 | * ``N:M`` -> standard python form, means including items N...(M-1). |
|
3686 | 3717 | * ``N-M`` -> include items N..M (closed endpoint). |
|
3687 | 3718 | """ |
|
3688 | 3719 | lines = self.history_manager.get_range_by_str(range_str, raw=raw) |
|
3689 |
|
|
|
3720 | text = "\n".join(x for _, _, x in lines) | |
|
3721 | ||
|
3722 | # Skip the last line, as it's probably the magic that called this | |
|
3723 | if not range_str: | |
|
3724 | if "\n" not in text: | |
|
3725 | text = "" | |
|
3726 | else: | |
|
3727 | text = text[: text.rfind("\n")] | |
|
3728 | ||
|
3729 | return text | |
|
3690 | 3730 | |
|
3691 | 3731 | def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False): |
|
3692 | 3732 | """Get a code string from history, file, url, or a string or macro. |
|
3693 | 3733 | |
|
3694 | 3734 | This is mainly used by magic functions. |
|
3695 | 3735 | |
|
3696 | 3736 | Parameters |
|
3697 | 3737 | ---------- |
|
3698 | ||
|
3699 | 3738 | target : str |
|
3700 | ||
|
3701 | 3739 | A string specifying code to retrieve. This will be tried respectively |
|
3702 | 3740 | as: ranges of input history (see %history for syntax), url, |
|
3703 | 3741 | corresponding .py file, filename, or an expression evaluating to a |
|
3704 | 3742 | string or Macro in the user namespace. |
|
3705 | 3743 | |
|
3744 | If empty string is given, returns complete history of current | |
|
3745 | session, without the last line. | |
|
3746 | ||
|
3706 | 3747 | raw : bool |
|
3707 | 3748 | If true (default), retrieve raw history. Has no effect on the other |
|
3708 | 3749 | retrieval mechanisms. |
|
3709 | 3750 | |
|
3710 | 3751 | py_only : bool (default False) |
|
3711 | 3752 | Only try to fetch python code, do not try alternative methods to decode file |
|
3712 | 3753 | if unicode fails. |
|
3713 | 3754 | |
|
3714 | 3755 | Returns |
|
3715 | 3756 | ------- |
|
3716 | 3757 | A string of code. |
|
3717 | 3758 | |
|
3718 | 3759 | ValueError is raised if nothing is found, and TypeError if it evaluates |
|
3719 | 3760 | to an object of another type. In each case, .args[0] is a printable |
|
3720 | 3761 | message. |
|
3721 | 3762 | """ |
|
3722 | 3763 | code = self.extract_input_lines(target, raw=raw) # Grab history |
|
3723 | 3764 | if code: |
|
3724 | 3765 | return code |
|
3725 | 3766 | try: |
|
3726 | 3767 | if target.startswith(('http://', 'https://')): |
|
3727 | 3768 | return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie) |
|
3728 | 3769 | except UnicodeDecodeError as e: |
|
3729 | 3770 | if not py_only : |
|
3730 | 3771 | # Deferred import |
|
3731 | 3772 | from urllib.request import urlopen |
|
3732 | 3773 | response = urlopen(target) |
|
3733 | 3774 | return response.read().decode('latin1') |
|
3734 | 3775 | raise ValueError(("'%s' seem to be unreadable.") % target) from e |
|
3735 | 3776 | |
|
3736 | 3777 | potential_target = [target] |
|
3737 | 3778 | try : |
|
3738 | 3779 | potential_target.insert(0,get_py_filename(target)) |
|
3739 | 3780 | except IOError: |
|
3740 | 3781 | pass |
|
3741 | 3782 | |
|
3742 | 3783 | for tgt in potential_target : |
|
3743 | 3784 | if os.path.isfile(tgt): # Read file |
|
3744 | 3785 | try : |
|
3745 | 3786 | return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie) |
|
3746 | 3787 | except UnicodeDecodeError as e: |
|
3747 | 3788 | if not py_only : |
|
3748 | 3789 | with io_open(tgt,'r', encoding='latin1') as f : |
|
3749 | 3790 | return f.read() |
|
3750 | 3791 | raise ValueError(("'%s' seem to be unreadable.") % target) from e |
|
3751 | 3792 | elif os.path.isdir(os.path.expanduser(tgt)): |
|
3752 | 3793 | raise ValueError("'%s' is a directory, not a regular file." % target) |
|
3753 | 3794 | |
|
3754 | 3795 | if search_ns: |
|
3755 | 3796 | # Inspect namespace to load object source |
|
3756 | 3797 | object_info = self.object_inspect(target, detail_level=1) |
|
3757 | 3798 | if object_info['found'] and object_info['source']: |
|
3758 | 3799 | return object_info['source'] |
|
3759 | 3800 | |
|
3760 | 3801 | try: # User namespace |
|
3761 | 3802 | codeobj = eval(target, self.user_ns) |
|
3762 | 3803 | except Exception as e: |
|
3763 | 3804 | raise ValueError(("'%s' was not found in history, as a file, url, " |
|
3764 | 3805 | "nor in the user namespace.") % target) from e |
|
3765 | 3806 | |
|
3766 | 3807 | if isinstance(codeobj, str): |
|
3767 | 3808 | return codeobj |
|
3768 | 3809 | elif isinstance(codeobj, Macro): |
|
3769 | 3810 | return codeobj.value |
|
3770 | 3811 | |
|
3771 | 3812 | raise TypeError("%s is neither a string nor a macro." % target, |
|
3772 | 3813 | codeobj) |
|
3773 | 3814 | |
|
3815 | def _atexit_once(self): | |
|
3816 | """ | |
|
3817 | At exist operation that need to be called at most once. | |
|
3818 | Second call to this function per instance will do nothing. | |
|
3819 | """ | |
|
3820 | ||
|
3821 | if not getattr(self, "_atexit_once_called", False): | |
|
3822 | self._atexit_once_called = True | |
|
3823 | # Clear all user namespaces to release all references cleanly. | |
|
3824 | self.reset(new_session=False) | |
|
3825 | # Close the history session (this stores the end time and line count) | |
|
3826 | # this must be *before* the tempfile cleanup, in case of temporary | |
|
3827 | # history db | |
|
3828 | self.history_manager.end_session() | |
|
3829 | self.history_manager = None | |
|
3830 | ||
|
3774 | 3831 | #------------------------------------------------------------------------- |
|
3775 | 3832 | # Things related to IPython exiting |
|
3776 | 3833 | #------------------------------------------------------------------------- |
|
3777 | 3834 | def atexit_operations(self): |
|
3778 | 3835 | """This will be executed at the time of exit. |
|
3779 | 3836 | |
|
3780 | 3837 | Cleanup operations and saving of persistent data that is done |
|
3781 | 3838 | unconditionally by IPython should be performed here. |
|
3782 | 3839 | |
|
3783 | 3840 | For things that may depend on startup flags or platform specifics (such |
|
3784 | 3841 | as having readline or not), register a separate atexit function in the |
|
3785 | 3842 | code that has the appropriate information, rather than trying to |
|
3786 | 3843 | clutter |
|
3787 | 3844 | """ |
|
3788 | # Close the history session (this stores the end time and line count) | |
|
3789 | # this must be *before* the tempfile cleanup, in case of temporary | |
|
3790 | # history db | |
|
3791 | self.history_manager.end_session() | |
|
3845 | self._atexit_once() | |
|
3792 | 3846 | |
|
3793 | 3847 | # Cleanup all tempfiles and folders left around |
|
3794 | 3848 | for tfile in self.tempfiles: |
|
3795 | 3849 | try: |
|
3796 | 3850 | tfile.unlink() |
|
3851 | self.tempfiles.remove(tfile) | |
|
3797 | 3852 | except FileNotFoundError: |
|
3798 | 3853 | pass |
|
3799 | ||
|
3854 | del self.tempfiles | |
|
3800 | 3855 | for tdir in self.tempdirs: |
|
3801 | 3856 | try: |
|
3802 | 3857 | tdir.rmdir() |
|
3858 | self.tempdirs.remove(tdir) | |
|
3803 | 3859 | except FileNotFoundError: |
|
3804 | 3860 | pass |
|
3861 | del self.tempdirs | |
|
3805 | 3862 | |
|
3806 | # Clear all user namespaces to release all references cleanly. | |
|
3807 | self.reset(new_session=False) | |
|
3808 | 3863 | |
|
3809 | 3864 | # Run user hooks |
|
3810 | 3865 | self.hooks.shutdown_hook() |
|
3811 | 3866 | |
|
3812 | 3867 | def cleanup(self): |
|
3813 | 3868 | self.restore_sys_module_state() |
|
3814 | 3869 | |
|
3815 | 3870 | |
|
3816 | 3871 | # Overridden in terminal subclass to change prompts |
|
3817 | 3872 | def switch_doctest_mode(self, mode): |
|
3818 | 3873 | pass |
|
3819 | 3874 | |
|
3820 | 3875 | |
|
3821 | 3876 | class InteractiveShellABC(metaclass=abc.ABCMeta): |
|
3822 | 3877 | """An abstract base class for InteractiveShell.""" |
|
3823 | 3878 | |
|
3824 | 3879 | InteractiveShellABC.register(InteractiveShell) |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file was removed | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now