Merge branch 'master' into dev
Adam Hackbarth
r26087:4403225e merge
@@ -290,7 +290,7 @@ class Pdb(OldPdb):
290 290
291 291 def hidden_frames(self, stack):
292 292 """
293 Given an index in the stack return wether it should be skipped.
293 Given an index in the stack return whether it should be skipped.
294 294
295 295 This is used in up/down and where to skip frames.
296 296 """
@@ -713,7 +713,9 @@ class Pdb(OldPdb):
713 713 break
714 714 else:
715 715 # if no break occured.
716 self.error("all frames above hidden")
716 self.error(
717 "all frames above hidden, use `skip_hidden False` to get get into those."
718 )
717 719 return
718 720
719 721 Colors = self.color_scheme_table.active_colors
@@ -756,7 +758,9 @@ class Pdb(OldPdb):
756 758 if counter >= count:
757 759 break
758 760 else:
759 self.error("all frames bellow hidden")
761 self.error(
762 "all frames bellow hidden, use `skip_hidden False` to get get into those."
763 )
760 764 return
761 765
762 766 Colors = self.color_scheme_table.active_colors
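The two hunks above replace the terse "all frames above hidden" / "all frames below hidden" errors with a pointer to `skip_hidden False`. As a rough illustration (not IPython's internals), the sketch below shows how a frame opts out of display via `__tracebackhide__` and how a user would surface it again in the debugger; the interactive steps assume an IPython session:

    # Illustrative only: `__tracebackhide__` is the marker IPython's debugger checks.
    def _helper():
        __tracebackhide__ = True          # ask the debugger to skip this frame
        raise ValueError("boom")

    def main():
        _helper()

    # Inside IPython, roughly:
    #   In [1]: main()                    # raises ValueError
    #   In [2]: %debug                    # post-mortem; hidden frames are skipped
    #   ipdb> up                          # if everything above is hidden, the new
    #                                     # message suggests `skip_hidden False`
    #   ipdb> skip_hidden False           # hidden frames become navigable again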
@@ -1086,7 +1086,7 @@ class Video(DisplayObject):
1086 1086 if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')):
1087 1087 url = data
1088 1088 data = None
1089 elif os.path.exists(data):
1089 elif data is not None and os.path.exists(data):
1090 1090 filename = data
1091 1091 data = None
1092 1092
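The added `data is not None` check matters because `os.path.exists(None)` raises `TypeError` on Python 3, so constructing a `Video` with only a `url` used to fail before the URL was even considered. A distilled, standalone sketch of the branch (variable names mirror the constructor, but this is not the constructor itself, and the URL is a placeholder):

    import os

    data = None                                   # what arrives when only url= is passed
    url = "https://example.com/clip.mp4"          # placeholder URL
    filename = None

    if url is None and isinstance(data, str) and data.startswith(("http:", "https:")):
        url, data = data, None
    elif data is not None and os.path.exists(data):   # old guard called exists(None) -> TypeError
        filename, data = data, None

    print("url-only case handled:", url, data, filename)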
@@ -757,6 +757,7 @@ class InteractiveShell(SingletonConfigurable):
757 757 self.meta = Struct()
758 758
759 759 # Temporary files used for various purposes. Deleted at exit.
760 # The files here are stored with Path from Pathlib
760 761 self.tempfiles = []
761 762 self.tempdirs = []
762 763
@@ -3595,16 +3596,17 @@ class InteractiveShell(SingletonConfigurable):
3595 3596 - data(None): if data is given, it gets written out to the temp file
3596 3597 immediately, and the file is closed again."""
3597 3598
3598 dirname = tempfile.mkdtemp(prefix=prefix)
3599 self.tempdirs.append(dirname)
3599 dir_path = Path(tempfile.mkdtemp(prefix=prefix))
3600 self.tempdirs.append(dir_path)
3600 3601
3601 handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
3602 handle, filename = tempfile.mkstemp(".py", prefix, dir=str(dir_path))
3602 3603 os.close(handle) # On Windows, there can only be one open handle on a file
3603 self.tempfiles.append(filename)
3604
3605 file_path = Path(filename)
3606 self.tempfiles.append(file_path)
3604 3607
3605 3608 if data:
3606 with open(filename, 'w') as tmp_file:
3607 tmp_file.write(data)
3609 file_path.write_text(data)
3608 3610 return filename
3609 3611
3610 3612 @undoc
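This hunk keeps the `tempfile` calls but tracks the results as `pathlib.Path` objects and writes the initial contents with `Path.write_text`. A standalone sketch of the same pattern (names and prefix are illustrative, not IPython's attributes):

    import os
    import tempfile
    from pathlib import Path

    tempdirs, tempfiles = [], []              # stand-ins for self.tempdirs / self.tempfiles

    dir_path = Path(tempfile.mkdtemp(prefix="ipython_edit_"))
    tempdirs.append(dir_path)

    handle, filename = tempfile.mkstemp(".py", "ipython_edit_", dir=str(dir_path))
    os.close(handle)                          # mkstemp leaves the descriptor open; close it
    file_path = Path(filename)
    tempfiles.append(file_path)

    file_path.write_text("print('hello')\n")  # replaces open(filename, 'w').write(...)
    print(file_path.read_text())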
@@ -3761,14 +3763,14 @@ class InteractiveShell(SingletonConfigurable):
3761 3763 # Cleanup all tempfiles and folders left around
3762 3764 for tfile in self.tempfiles:
3763 3765 try:
3764 os.unlink(tfile)
3765 except OSError:
3766 tfile.unlink()
3767 except FileNotFoundError:
3766 3768 pass
3767 3769
3768 3770 for tdir in self.tempdirs:
3769 3771 try:
3770 os.rmdir(tdir)
3771 except OSError:
3772 tdir.rmdir()
3773 except FileNotFoundError:
3772 3774 pass
3773 3775
3774 3776 # Clear all user namespaces to release all references cleanly.
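Note that swapping `os.unlink`/`os.rmdir` plus `except OSError` for the `Path` methods plus `except FileNotFoundError` narrows what is silenced: a missing file or directory is still ignored, but other OS errors (for example a non-empty temp directory) would now propagate. A small sketch of the resulting cleanup pattern, assuming the lists hold `Path` objects as in the earlier hunk; on Python 3.8+ `Path.unlink(missing_ok=True)` covers the file case without the try/except:

    import tempfile
    from pathlib import Path

    tempdir = Path(tempfile.mkdtemp())
    tempfiles = [tempdir / "gone.py"]     # never created, so unlink must not blow up
    tempdirs = [tempdir]

    for tfile in tempfiles:
        try:
            tfile.unlink()                # Path equivalent of os.unlink
        except FileNotFoundError:
            pass                          # already gone; ignore

    for tdir in tempdirs:
        try:
            tdir.rmdir()                  # only succeeds on empty directories
        except FileNotFoundError:
            pass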
@@ -40,6 +40,7 @@ from IPython.utils.timing import clock, clock2
40 40 from warnings import warn
41 41 from logging import error
42 42 from io import StringIO
43 from pathlib import Path
43 44
44 45 if sys.version_info > (3,8):
45 46 from ast import Module
@@ -362,8 +363,7 @@ class ExecutionMagics(Magics):
362 363 print('\n*** Profile stats marshalled to file',\
363 364 repr(dump_file)+'.',sys_exit)
364 365 if text_file:
365 with open(text_file, 'w') as pfile:
366 pfile.write(output)
366 Path(text_file).write_text(output)
367 367 print('\n*** Profile printout saved to text file',\
368 368 repr(text_file)+'.',sys_exit)
369 369
@@ -724,7 +724,7 @@ class ExecutionMagics(Magics):
724 724 sys.argv = [filename] + args # put in the proper filename
725 725
726 726 if 'n' in opts:
727 name = os.path.splitext(os.path.basename(filename))[0]
727 name = Path(filename).stem
728 728 else:
729 729 name = '__main__'
730 730
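`Path(filename).stem` gives the same result as the old `os.path.splitext(os.path.basename(filename))[0]` pair: both keep the base name and drop only the final suffix. A quick check (file names are made up):

    import os
    from pathlib import Path

    for filename in ("/tmp/script.py", "script.py", "pkg/mod.test.py"):
        old = os.path.splitext(os.path.basename(filename))[0]
        new = Path(filename).stem
        assert old == new, (filename, old, new)
    print("Path.stem matches splitext/basename for these cases")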
@@ -8,37 +8,38 @@
8 8 # The full license is in the file COPYING.txt, distributed with this software.
9 9 #-----------------------------------------------------------------------------
10 10
11 import os
12 11 import re
13 12 import shlex
14 13 import sys
15 14
15 from pathlib import Path
16 16 from IPython.core.magic import Magics, magics_class, line_magic
17 17
18 18
19 19 def _is_conda_environment():
20 20 """Return True if the current Python executable is in a conda env"""
21 21 # TODO: does this need to change on windows?
22 conda_history = os.path.join(sys.prefix, 'conda-meta', 'history')
23 return os.path.exists(conda_history)
22 return Path(sys.prefix, "conda-meta", "history").exists()
24 23
25 24
26 25 def _get_conda_executable():
27 26 """Find the path to the conda executable"""
28 27 # Check if there is a conda executable in the same directory as the Python executable.
29 28 # This is the case within conda's root environment.
30 conda = os.path.join(os.path.dirname(sys.executable), 'conda')
31 if os.path.isfile(conda):
32 return conda
29 conda = Path(sys.executable).parent / "conda"
30 if conda.is_file():
31 return str(conda)
33 32
34 33 # Otherwise, attempt to extract the executable from conda history.
35 34 # This applies in any conda environment.
36 R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]")
37 with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f:
38 for line in f:
39 match = R.match(line)
40 if match:
41 return match.groupdict()['command']
35 history = Path(sys.prefix, "conda-meta", "history").read_text()
36 match = re.search(
37 r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]",
38 history,
39 flags=re.MULTILINE,
40 )
41 if match:
42 return match.groupdict()["command"]
42 43
43 44 # Fallback: assume conda is available on the system path.
44 45 return "conda"
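The rewritten `_get_conda_executable` reads `conda-meta/history` once and scans it with a single `re.search` in MULTILINE mode instead of looping over lines. The sketch below exercises that regex on a made-up but realistically formatted history entry; note the pattern keeps the original `[create|install]` character class unchanged:

    import re

    history = (
        "==> 2020-11-01 10:00:00 <==\n"
        "# cmd: /opt/miniconda3/bin/conda create -n demo python=3.8\n"
        "+defaults::python-3.8.5-h7579374_1\n"
    )

    match = re.search(
        r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]",
        history,
        flags=re.MULTILINE,
    )
    print(match.groupdict()["command"] if match else "no conda command found")
    # -> /opt/miniconda3/bin/conda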
@@ -1,5 +1,5 @@
1 1 """
2 Test for async helpers.
2 Test for async helpers.
3 3
4 4 Should only trigger on python 3.5+ or will have syntax errors.
5 5 """
@@ -9,6 +9,13 @@ from textwrap import dedent, indent
9 9 from unittest import TestCase
10 10 from IPython.testing.decorators import skip_without
11 11 import sys
12 from typing import TYPE_CHECKING
13
14 if TYPE_CHECKING:
15 from IPython import get_ipython
16
17 ip = get_ipython()
18
12 19
13 20 iprc = lambda x: ip.run_cell(dedent(x)).raise_error()
14 21 iprc_nr = lambda x: ip.run_cell(dedent(x))
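The added import is guarded by `TYPE_CHECKING`, so it only exists for static analysis; at runtime the module still relies on a `get_ipython` name being available (inside IPython it is injected as a builtin). A minimal sketch of the pattern, intended to be run inside an IPython session:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen by type checkers, never executed at runtime.
        from IPython import get_ipython

    ip = get_ipython()          # resolved from the injected builtin at runtime
    print(type(ip).__name__)    # e.g. TerminalInteractiveShell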
@@ -275,7 +282,7 @@ class AsyncTest(TestCase):
275 282 await sleep(0.1)
276 283 """
277 284 )
278
285
279 286 if sys.version_info < (3,9):
280 287 # new pgen parser in 3.9 does not raise MemoryError on too many nested
281 288 # parens anymore
@@ -138,7 +138,9 @@ def _get_inline_config():
138 138 from ipykernel.pylab.config import InlineBackend
139 139 return InlineBackend.instance()
140 140
141 @dec.skip_without('matplotlib')
141
142 @dec.skip_without("ipykernel")
143 @dec.skip_without("matplotlib")
142 144 def test_set_matplotlib_close():
143 145 cfg = _get_inline_config()
144 146 cfg.close_figures = False
@@ -173,7 +175,9 @@ def test_set_matplotlib_formats():
173 175 else:
174 176 nt.assert_not_in(Figure, f)
175 177
176 @dec.skip_without('matplotlib')
178
179 @dec.skip_without("ipykernel")
180 @dec.skip_without("matplotlib")
177 181 def test_set_matplotlib_formats_kwargs():
178 182 from matplotlib.figure import Figure
179 183 ip = get_ipython()
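Both test hunks add an `ipykernel` guard next to the existing `matplotlib` one. Roughly, a `skip_without`-style decorator skips the test when an optional dependency cannot be imported; the sketch below is a generic re-implementation for illustration, not `IPython.testing.decorators.skip_without` itself:

    import importlib
    import unittest

    def skip_without(module_name):
        """Skip the decorated test if `module_name` is not importable."""
        try:
            importlib.import_module(module_name)
        except ImportError:
            return unittest.skip("requires %s" % module_name)
        return lambda func: func

    @skip_without("ipykernel")
    @skip_without("matplotlib")
    def test_set_matplotlib_close():
        pass    # body elided; the decorators decide whether it runs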
@@ -14,6 +14,7 @@ from unittest import TestCase
14 14 from unittest import mock
15 15 from importlib import invalidate_caches
16 16 from io import StringIO
17 from pathlib import Path
17 18
18 19 import nose.tools as nt
19 20
@@ -831,8 +832,7 @@ def test_file():
831 832 'line1',
832 833 'line2',
833 834 ]))
834 with open(fname) as f:
835 s = f.read()
835 s = Path(fname).read_text()
836 836 nt.assert_in('line1\n', s)
837 837 nt.assert_in('line2', s)
838 838
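The same substitution repeats through the next several test hunks: an explicit `open`/`read` pair becomes `Path(...).read_text()`. A quick equivalence check in a throwaway directory:

    from pathlib import Path
    from tempfile import TemporaryDirectory

    with TemporaryDirectory() as tmpdir:
        fname = Path(tmpdir) / "example.txt"
        fname.write_text("line1\nline2\n")

        with open(fname) as f:              # old style used by these tests
            s_old = f.read()

        s_new = Path(fname).read_text()     # new style

        assert s_old == s_new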
@@ -846,8 +846,7 @@ def test_file_single_quote():
846 846 'line1',
847 847 'line2',
848 848 ]))
849 with open(fname) as f:
850 s = f.read()
849 s = Path(fname).read_text()
851 850 nt.assert_in('line1\n', s)
852 851 nt.assert_in('line2', s)
853 852
@@ -861,8 +860,7 @@ def test_file_double_quote():
861 860 'line1',
862 861 'line2',
863 862 ]))
864 with open(fname) as f:
865 s = f.read()
863 s = Path(fname).read_text()
866 864 nt.assert_in('line1\n', s)
867 865 nt.assert_in('line2', s)
868 866
@@ -876,8 +874,7 @@ def test_file_var_expand():
876 874 'line1',
877 875 'line2',
878 876 ]))
879 with open(fname) as f:
880 s = f.read()
877 s = Path(fname).read_text()
881 878 nt.assert_in('line1\n', s)
882 879 nt.assert_in('line2', s)
883 880
@@ -908,8 +905,7 @@ def test_file_amend():
908 905 'line3',
909 906 'line4',
910 907 ]))
911 with open(fname) as f:
912 s = f.read()
908 s = Path(fname).read_text()
913 909 nt.assert_in('line1\n', s)
914 910 nt.assert_in('line3\n', s)
915 911
@@ -922,8 +918,7 @@ def test_file_spaces():
922 918 'line1',
923 919 'line2',
924 920 ]))
925 with open(fname) as f:
926 s = f.read()
921 s = Path(fname).read_text()
927 922 nt.assert_in('line1\n', s)
928 923 nt.assert_in('line2', s)
929 924
@@ -1063,15 +1058,13 @@ def test_save():
1063 1058 with TemporaryDirectory() as tmpdir:
1064 1059 file = os.path.join(tmpdir, "testsave.py")
1065 1060 ip.run_line_magic("save", "%s 1-10" % file)
1066 with open(file) as f:
1067 content = f.read()
1068 nt.assert_equal(content.count(cmds[0]), 1)
1069 nt.assert_in('coding: utf-8', content)
1061 content = Path(file).read_text()
1062 nt.assert_equal(content.count(cmds[0]), 1)
1063 nt.assert_in("coding: utf-8", content)
1070 1064 ip.run_line_magic("save", "-a %s 1-10" % file)
1071 with open(file) as f:
1072 content = f.read()
1073 nt.assert_equal(content.count(cmds[0]), 2)
1074 nt.assert_in('coding: utf-8', content)
1065 content = Path(file).read_text()
1066 nt.assert_equal(content.count(cmds[0]), 2)
1067 nt.assert_in("coding: utf-8", content)
1075 1068
1076 1069
1077 1070 def test_store():
@@ -1231,8 +1224,7 @@ def test_run_module_from_import_hook():
1231 1224 "Test that a module can be loaded via an import hook"
1232 1225 with TemporaryDirectory() as tmpdir:
1233 1226 fullpath = os.path.join(tmpdir, 'my_tmp.py')
1234 with open(fullpath, 'w') as f:
1235 f.write(TEST_MODULE)
1227 Path(fullpath).write_text(TEST_MODULE)
1236 1228
1237 1229 class MyTempImporter(object):
1238 1230 def __init__(self):
@@ -1248,8 +1240,7 @@ def test_run_module_from_import_hook():
1248 1240 return imp.load_source('my_tmp', fullpath)
1249 1241
1250 1242 def get_code(self, fullname):
1251 with open(fullpath, 'r') as f:
1252 return compile(f.read(), 'foo', 'exec')
1243 return compile(Path(fullpath).read_text(), "foo", "exec")
1253 1244
1254 1245 def is_package(self, __):
1255 1246 return False
@@ -699,9 +699,10 @@ class VerboseTB(TBTools):
699 699
700 700 frames = []
701 701 skipped = 0
702 for r in records:
702 lastrecord = len(records) - 1
703 for i, r in enumerate(records):
703 704 if not isinstance(r, stack_data.RepeatedFrames) and self.skip_hidden:
704 if r.frame.f_locals.get("__tracebackhide__", 0):
705 if r.frame.f_locals.get("__tracebackhide__", 0) and i != lastrecord:
705 706 skipped += 1
706 707 continue
707 708 if skipped:
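By tracking `lastrecord`, the loop never hides the final record, so a traceback whose innermost frame sets `__tracebackhide__` still shows the line that actually raised. A small sketch of that situation; outside IPython the marker is just an unused local, and the commented call is meant for an IPython session:

    def innermost():
        __tracebackhide__ = True      # VerboseTB would normally skip this frame
        raise RuntimeError("raised from a hidden frame")

    def caller():
        innermost()

    # caller()   # uncomment inside IPython: the last (hidden) frame is still shown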
@@ -4,7 +4,7 @@
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 import os
7 from pathlib import Path
8 8
9 9 import nose.tools as nt
10 10
@@ -12,20 +12,22 @@ from IPython.utils.syspathcontext import prepended_to_syspath
12 12 from IPython.utils.tempdir import TemporaryDirectory
13 13 from IPython.lib.deepreload import reload as dreload
14 14
15
15 16 def test_deepreload():
16 17 "Test that dreload does deep reloads and skips excluded modules."
17 18 with TemporaryDirectory() as tmpdir:
18 19 with prepended_to_syspath(tmpdir):
19 with open(os.path.join(tmpdir, 'A.py'), 'w') as f:
20 tmpdirpath = Path(tmpdir)
21 with open(tmpdirpath / "A.py", "w") as f:
20 22 f.write("class Object(object):\n pass\n")
21 with open(os.path.join(tmpdir, 'B.py'), 'w') as f:
23 with open(tmpdirpath / "B.py", "w") as f:
22 24 f.write("import A\n")
23 25 import A
24 26 import B
25 27
26 28 # Test that A is not reloaded.
27 29 obj = A.Object()
28 dreload(B, exclude=['A'])
30 dreload(B, exclude=["A"])
29 31 nt.assert_true(isinstance(obj, A.Object))
30 32
31 33 # Test that A is reloaded.
@@ -16,7 +16,7 @@ Requirements
16 16
17 17 The documentation must be built using Python 3.
18 18
19 In additions to :ref:`devinstall`,
19 In addition to :ref:`devinstall`,
20 20 the following tools are needed to build the documentation:
21 21
22 22 - sphinx
@@ -40,7 +40,7 @@ html``.
40 40
41 41 ``make html_noapi`` - same as above, but without running the auto-generated API
42 42 docs. When you are working on the narrative documentation, the most time
43 consuming portion of the build process is the processing and rending of the
43 consuming portion of the build process is the processing and rendering of the
44 44 API documentation. This build target skips that.
45 45
46 46 ``make pdf`` will compile a pdf from the documentation.
@@ -53,7 +53,7 @@ previous docs build.
53 53 To remove the previous docs build you can use ``make clean``.
54 54 You can also combine ``clean`` with other `make` commands;
55 55 for example,
56 ``make clean html`` will do a complete rebuild of the docs or `make clean pdf` will do a complete build of the pdf.
56 ``make clean html`` will do a complete rebuild of the docs or ``make clean pdf`` will do a complete build of the pdf.
57 57
58 58
59 59 Continuous Integration
@@ -2,7 +2,7 @@
2 2
3 3 from os.path import join, dirname, abspath
4 4 import inspect
5
5 from pathlib import Path
6 6 from IPython.terminal.ipapp import TerminalIPythonApp
7 7 from ipykernel.kernelapp import IPKernelApp
8 8 from traitlets import Undefined
@@ -118,8 +118,7 @@ def write_doc(name, title, app, preamble=None):
118 118
119 119 if __name__ == '__main__':
120 120 # Touch this file for the make target
121 with open(generated, 'w'):
122 pass
121 Path(generated).write_text("")
123 122
124 123 write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
125 124 write_doc('kernel', 'IPython kernel options', IPKernelApp(),
@@ -1,3 +1,4 @@
1
1 2 from pathlib import Path
2 3 from IPython.core.alias import Alias
3 4 from IPython.core.interactiveshell import InteractiveShell
@@ -61,7 +62,7 @@ for name, func in sorted(magics["cell"].items(), key=sortkey):
61 62 format_docstring(func),
62 63 ""])
63 64
65
64 66 src_path = Path(__file__).parent
65 67 dest = src_path.joinpath("source", "interactive", "magics-generated.txt")
66 with open(dest, "w") as f:
67 f.write("\n".join(output))
68 dest.write_text("\n".join(output))
@@ -10,12 +10,15 @@ IPython Sphinx Directive
10 10 The IPython Sphinx Directive is in 'beta' and currently under
11 11 active development. Improvements to the code or documentation are welcome!
12 12
13 The ipython directive is a stateful ipython shell for embedding in
14 sphinx documents. It knows about standard ipython prompts, and
15 extracts the input and output lines. These prompts will be renumbered
16 starting at ``1``. The inputs will be fed to an embedded ipython
17 interpreter and the outputs from that interpreter will be inserted as
18 well. For example, code blocks like the following::
13 .. |rst| replace:: reStructuredText
14
15 The :rst:dir:`ipython` directive is a stateful shell that can be used
16 in |rst| files.
17
18 It knows about standard ipython prompts, and extracts the input and output
19 lines. These prompts will be renumbered starting at ``1``. The inputs will be
20 fed to an embedded ipython interpreter and the outputs from that interpreter
21 will be inserted as well. For example, code blocks like the following::
19 22
20 23 .. ipython::
21 24
@@ -42,6 +45,48 @@ will be rendered as
42 45 document that generates the rendered output.
43 46
44 47
48 Directive and options
49 =====================
50
51 The IPython directive takes a number of options detailed here.
52
53 .. rst:directive:: ipython
54
55 Create an IPython directive.
56
57 .. rst:directive:option:: doctest
58
59 Run a doctest on IPython code blocks in rst.
60
61 .. rst:directive:option:: python
62
63 Used to indicate that the relevant code block does not have IPython prompts.
64
65 .. rst:directive:option:: okexcept
66
67 Allow the code block to raise an exception.
68
69 .. rst:directive:option:: okwarning
70
71 Allow the code block to emit a warning.
72
73 .. rst:directive:option:: suppress
74
75 Silence any warnings or expected errors.
76
77 .. rst:directive:option:: verbatim
78
79 A noop that allows for any text to be syntax highlighted as valid IPython code.
80
81 .. rst:directive:option:: savefig: OUTFILE [IMAGE_OPTIONS]
82
83 Save output from matplotlib to *outfile*.
84
85 It's important to note that all of these options can be used for the entire
86 directive block or they can decorate individual lines of code as explained
87 in :ref:`pseudo-decorators`.
88
89
45 90 Persisting the Python session across IPython directive blocks
46 91 =============================================================
47 92
@@ -393,6 +438,8 @@ Pretty much anything you can do with the ipython code, you can do with
393 438 with a simple python script. Obviously, though it doesn't make sense
394 439 to use the doctest option.
395 440
441 .. _pseudo-decorators:
442
396 443 Pseudo-Decorators
397 444 =================
398 445
@@ -7,26 +7,24 @@ whatsnew/development.rst (chronologically ordered), and deletes the snippets.
7 7
8 8 import io
9 9 import sys
10 from glob import glob
11 from os.path import dirname, basename, abspath, join as pjoin
10 from pathlib import Path
12 11 from subprocess import check_call, check_output
13 12
14 repo_root = dirname(dirname(abspath(__file__)))
15 whatsnew_dir = pjoin(repo_root, 'docs', 'source', 'whatsnew')
16 pr_dir = pjoin(whatsnew_dir, 'pr')
17 target = pjoin(whatsnew_dir, 'development.rst')
13 repo_root = Path(__file__).resolve().parent.parent
14 whatsnew_dir = repo_root / "docs" / "source" / "whatsnew"
15 pr_dir = whatsnew_dir / "pr"
16 target = whatsnew_dir / "development.rst"
18 17
19 18 FEATURE_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. FEATURE INSERTION POINT."
20 19 INCOMPAT_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. INCOMPAT INSERTION POINT."
21 20
22 21 # 1. Collect the whatsnew snippet files ---------------------------------------
23 22
24 files = set(glob(pjoin(pr_dir, '*.rst')))
23 files = set(pr_dir.glob("*.rst"))
25 24 # Ignore explanatory and example files
26 files.difference_update({pjoin(pr_dir, f) for f in {
27 'incompat-switching-to-perl.rst',
28 'antigravity-feature.rst'}
29 })
25 files.difference_update(
26 {pr_dir / f for f in {"incompat-switching-to-perl.rst", "antigravity-feature.rst"}}
27 )
30 28
31 29 if not files:
32 30 print("No automatic update available for what's new")
@@ -34,30 +32,31 @@ if not files:
34 32
35 33
36 34 def getmtime(f):
37 return check_output(['git', 'log', '-1', '--format="%ai"', '--', f])
35 return check_output(["git", "log", "-1", '--format="%ai"', "--", f])
36
38 37
39 38 files = sorted(files, key=getmtime)
40 39
41 40 features, incompats = [], []
42 41 for path in files:
43 with io.open(path, encoding='utf-8') as f:
42 with io.open(path, encoding="utf-8") as f:
44 43 try:
45 44 content = f.read().rstrip()
46 45 except Exception as e:
47 46 raise Exception('Error reading "{}"'.format(f)) from e
48 47
49 if basename(path).startswith('incompat-'):
48 if path.name.startswith("incompat-"):
50 49 incompats.append(content)
51 50 else:
52 51 features.append(content)
53 52
54 53 # Put the insertion markers back on the end, so they're ready for next time.
55 feature_block = '\n\n'.join(features + [FEATURE_MARK])
56 incompat_block = '\n\n'.join(incompats + [INCOMPAT_MARK])
54 feature_block = "\n\n".join(features + [FEATURE_MARK])
55 incompat_block = "\n\n".join(incompats + [INCOMPAT_MARK])
57 56
58 57 # 2. Update the target file ---------------------------------------------------
59 58
60 with io.open(target, encoding='utf-8') as f:
59 with io.open(target, encoding="utf-8") as f:
61 60 content = f.read()
62 61
63 62 assert content.count(FEATURE_MARK) == 1
@@ -67,16 +66,16 @@ content = content.replace(FEATURE_MARK, feature_block)
67 66 content = content.replace(INCOMPAT_MARK, incompat_block)
68 67
69 68 # Clean trailing whitespace
70 content = '\n'.join(l.rstrip() for l in content.splitlines())
69 content = "\n".join(l.rstrip() for l in content.splitlines())
71 70
72 with io.open(target, 'w', encoding='utf-8') as f:
71 with io.open(target, "w", encoding="utf-8") as f:
73 72 f.write(content)
74 73
75 74 # 3. Stage the changes in git -------------------------------------------------
76 75
77 76 for file in files:
78 check_call(['git', 'rm', file])
77 check_call(["git", "rm", file])
79 78
80 check_call(['git', 'add', target])
79 check_call(["git", "add", target])
81 80
82 81 print("Merged what's new changes. Check the diff and commit the change.")
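In the converted what's-new tool, `Path.glob` yields `Path` objects rather than strings, and those are handed straight to `git` via `subprocess`; that works because `subprocess` accepts path-like arguments in sequences (POSIX since Python 3.6, Windows since 3.8). A self-contained sketch with made-up paths:

    from pathlib import Path
    from tempfile import TemporaryDirectory

    with TemporaryDirectory() as tmp:
        pr_dir = Path(tmp) / "pr"
        pr_dir.mkdir()
        (pr_dir / "antigravity-feature.rst").write_text("A feature.\n")

        files = sorted(pr_dir.glob("*.rst"))      # Path objects, not strings
        for f in files:
            print(f.name)                         # -> antigravity-feature.rst
        # subprocess.check_call(["git", "rm", f]) # a Path works directly as an argument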