Merge branch 'master' into dev
Adam Hackbarth
r26087:4403225e merge
@@ -290,7 +290,7 b' class Pdb(OldPdb):'
290
290
291 def hidden_frames(self, stack):
291 def hidden_frames(self, stack):
292 """
292 """
293 Given an index in the stack return wether it should be skipped.
293 Given an index in the stack return whether it should be skipped.
294
294
295 This is used in up/down and where to skip frames.
295 This is used in up/down and where to skip frames.
296 """
296 """
@@ -713,7 +713,9 b' class Pdb(OldPdb):'
713 break
713 break
714 else:
714 else:
715 # if no break occurred.
715 # if no break occurred.
716 self.error("all frames above hidden")
716 self.error(
717 "all frames above hidden, use `skip_hidden False` to get get into those."
718 )
717 return
719 return
718
720
719 Colors = self.color_scheme_table.active_colors
721 Colors = self.color_scheme_table.active_colors
@@ -756,7 +758,9 b' class Pdb(OldPdb):'
756 if counter >= count:
758 if counter >= count:
757 break
759 break
758 else:
760 else:
759 self.error("all frames bellow hidden")
761 self.error(
762 "all frames bellow hidden, use `skip_hidden False` to get get into those."
763 )
760 return
764 return
761
765
762 Colors = self.color_scheme_table.active_colors
766 Colors = self.color_scheme_table.active_colors
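
The two error branches above now point the user at the debugger's `skip_hidden` toggle. For context, here is a minimal sketch of the convention this machinery keys on, assuming only that hidden frames are marked with a `__tracebackhide__` local, as IPython's debugger and traceback code do; the helper names below are made up for illustration.

    def _internal_helper():
        # A frame opts out of display by defining this local; IPython's
        # debugger and traceback code look it up in f_locals.
        __tracebackhide__ = True
        raise ValueError("boom")

    def public_entry_point():
        _internal_helper()

    # In an IPython debugger session, visibility would then be toggled with:
    #   ipdb> skip_hidden False    # stop skipping frames marked hidden
    #   ipdb> up                   # now able to walk into those frames
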
@@ -1086,7 +1086,7 b' class Video(DisplayObject):'
1086 if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')):
1086 if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')):
1087 url = data
1087 url = data
1088 data = None
1088 data = None
1089 elif os.path.exists(data):
1089 elif data is not None and os.path.exists(data):
1090 filename = data
1090 filename = data
1091 data = None
1091 data = None
1092
1092
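
The added `data is not None` guard matters because a `Video` built from a URL alone reaches this branch with `data` set to None, and `os.path.exists(None)` raises TypeError on Python 3. A small standalone sketch of that failure mode (the variable names are illustrative, not the class's attributes):

    import os

    data = None  # what the url-only path leaves behind
    try:
        os.path.exists(data)          # unguarded check from the old code
    except TypeError as exc:
        print("old branch would fail:", exc)

    # The patched condition short-circuits before touching the filesystem:
    if data is not None and os.path.exists(data):
        print("treat data as a filename")
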
@@ -757,6 +757,7 b' class InteractiveShell(SingletonConfigurable):'
757 self.meta = Struct()
757 self.meta = Struct()
758
758
759 # Temporary files used for various purposes. Deleted at exit.
759 # Temporary files used for various purposes. Deleted at exit.
760 # The files here are stored as Path objects from pathlib
760 self.tempfiles = []
761 self.tempfiles = []
761 self.tempdirs = []
762 self.tempdirs = []
762
763
@@ -3595,16 +3596,17 b' class InteractiveShell(SingletonConfigurable):'
3595 - data(None): if data is given, it gets written out to the temp file
3596 - data(None): if data is given, it gets written out to the temp file
3596 immediately, and the file is closed again."""
3597 immediately, and the file is closed again."""
3597
3598
3598 dirname = tempfile.mkdtemp(prefix=prefix)
3599 dir_path = Path(tempfile.mkdtemp(prefix=prefix))
3599 self.tempdirs.append(dirname)
3600 self.tempdirs.append(dir_path)
3600
3601
3601 handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
3602 handle, filename = tempfile.mkstemp(".py", prefix, dir=str(dir_path))
3602 os.close(handle) # On Windows, there can only be one open handle on a file
3603 os.close(handle) # On Windows, there can only be one open handle on a file
3603 self.tempfiles.append(filename)
3604
3605 file_path = Path(filename)
3606 self.tempfiles.append(file_path)
3604
3607
3605 if data:
3608 if data:
3606 with open(filename, 'w') as tmp_file:
3609 file_path.write_text(data)
3607 tmp_file.write(data)
3608 return filename
3610 return filename
3609
3611
3610 @undoc
3612 @undoc
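
The rewritten `mktempfile` tracks its temporary directory and file as `Path` objects. A standalone sketch of the same pattern, under the assumption that a `.py` temp file inside a fresh temp directory is wanted; names like `demo-` are placeholders, not IPython's prefixes.

    import os
    import tempfile
    from pathlib import Path

    dir_path = Path(tempfile.mkdtemp(prefix="demo-"))          # fresh temp dir
    handle, filename = tempfile.mkstemp(".py", "demo-", dir=str(dir_path))
    os.close(handle)   # on Windows only one open handle per file is allowed
    file_path = Path(filename)

    file_path.write_text("print('hi')\n")                      # optional payload
    print(file_path.read_text())
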
@@ -3761,14 +3763,14 b' class InteractiveShell(SingletonConfigurable):'
3761 # Cleanup all tempfiles and folders left around
3763 # Cleanup all tempfiles and folders left around
3762 for tfile in self.tempfiles:
3764 for tfile in self.tempfiles:
3763 try:
3765 try:
3764 os.unlink(tfile)
3766 tfile.unlink()
3765 except OSError:
3767 except FileNotFoundError:
3766 pass
3768 pass
3767
3769
3768 for tdir in self.tempdirs:
3770 for tdir in self.tempdirs:
3769 try:
3771 try:
3770 os.rmdir(tdir)
3772 tdir.rmdir()
3771 except OSError:
3773 except FileNotFoundError:
3772 pass
3774 pass
3773
3775
3774 # Clear all user namespaces to release all references cleanly.
3776 # Clear all user namespaces to release all references cleanly.
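
The cleanup loop above keeps an explicit `except FileNotFoundError` around `unlink()`. On Python 3.8+ the same intent can be spelled with `missing_ok=True`; a sketch under that assumption (IPython still supported 3.7 at this point, which presumably is why the try/except form was kept):

    import tempfile
    from pathlib import Path

    tmp = Path(tempfile.gettempdir()) / "probably-missing-example.txt"
    tmp.unlink(missing_ok=True)   # no FileNotFoundError even if already gone
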
@@ -40,6 +40,7 b' from IPython.utils.timing import clock, clock2'
40 from warnings import warn
40 from warnings import warn
41 from logging import error
41 from logging import error
42 from io import StringIO
42 from io import StringIO
43 from pathlib import Path
43
44
44 if sys.version_info > (3,8):
45 if sys.version_info > (3,8):
45 from ast import Module
46 from ast import Module
@@ -362,8 +363,7 b' class ExecutionMagics(Magics):'
362 print('\n*** Profile stats marshalled to file',\
363 print('\n*** Profile stats marshalled to file',\
363 repr(dump_file)+'.',sys_exit)
364 repr(dump_file)+'.',sys_exit)
364 if text_file:
365 if text_file:
365 with open(text_file, 'w') as pfile:
366 Path(text_file).write_text(output)
366 pfile.write(output)
367 print('\n*** Profile printout saved to text file',\
367 print('\n*** Profile printout saved to text file',\
368 repr(text_file)+'.',sys_exit)
368 repr(text_file)+'.',sys_exit)
369
369
@@ -724,7 +724,7 b' class ExecutionMagics(Magics):'
724 sys.argv = [filename] + args # put in the proper filename
724 sys.argv = [filename] + args # put in the proper filename
725
725
726 if 'n' in opts:
726 if 'n' in opts:
727 name = os.path.splitext(os.path.basename(filename))[0]
727 name = Path(filename).stem
728 else:
728 else:
729 name = '__main__'
729 name = '__main__'
730
730
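
`Path(filename).stem` is a one-call replacement for the `splitext`/`basename` pair it displaces; a quick check that the two spellings agree on an ordinary script path (the path itself is made up):

    import os
    from pathlib import Path

    filename = "/home/user/projects/my_script.py"
    old = os.path.splitext(os.path.basename(filename))[0]
    new = Path(filename).stem
    assert old == new == "my_script"
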
@@ -8,37 +8,38 b''
8 # The full license is in the file COPYING.txt, distributed with this software.
8 # The full license is in the file COPYING.txt, distributed with this software.
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10
10
11 import os
12 import re
11 import re
13 import shlex
12 import shlex
14 import sys
13 import sys
15
14
15 from pathlib import Path
16 from IPython.core.magic import Magics, magics_class, line_magic
16 from IPython.core.magic import Magics, magics_class, line_magic
17
17
18
18
19 def _is_conda_environment():
19 def _is_conda_environment():
20 """Return True if the current Python executable is in a conda env"""
20 """Return True if the current Python executable is in a conda env"""
21 # TODO: does this need to change on windows?
21 # TODO: does this need to change on windows?
22 conda_history = os.path.join(sys.prefix, 'conda-meta', 'history')
22 return Path(sys.prefix, "conda-meta", "history").exists()
23 return os.path.exists(conda_history)
24
23
25
24
26 def _get_conda_executable():
25 def _get_conda_executable():
27 """Find the path to the conda executable"""
26 """Find the path to the conda executable"""
28 # Check if there is a conda executable in the same directory as the Python executable.
27 # Check if there is a conda executable in the same directory as the Python executable.
29 # This is the case within conda's root environment.
28 # This is the case within conda's root environment.
30 conda = os.path.join(os.path.dirname(sys.executable), 'conda')
29 conda = Path(sys.executable).parent / "conda"
31 if os.path.isfile(conda):
30 if conda.is_file():
32 return conda
31 return str(conda)
33
32
34 # Otherwise, attempt to extract the executable from conda history.
33 # Otherwise, attempt to extract the executable from conda history.
35 # This applies in any conda environment.
34 # This applies in any conda environment.
36 R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]")
35 history = Path(sys.prefix, "conda-meta", "history").read_text()
37 with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f:
36 match = re.search(
38 for line in f:
37 r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]",
39 match = R.match(line)
38 history,
40 if match:
39 flags=re.MULTILINE,
41 return match.groupdict()['command']
40 )
41 if match:
42 return match.groupdict()["command"]
42
43
43 # Fallback: assume conda is available on the system path.
44 # Fallback: assume conda is available on the system path.
44 return "conda"
45 return "conda"
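
The history parsing above collapses a line-by-line `R.match` loop into one `re.search` with `re.MULTILINE`, so `^` anchors at every line of the file read in one go. A sketch against a made-up two-line history snippet (the real file lives under `sys.prefix/conda-meta/history`):

    import re

    history = (
        "==> 2020-01-01 00:00:00 <==\n"
        "# cmd: /opt/conda/bin/conda create -n demo python=3.8\n"
    )
    match = re.search(
        r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]",
        history,
        flags=re.MULTILINE,
    )
    if match:
        print(match.groupdict()["command"])   # -> /opt/conda/bin/conda
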
@@ -1,5 +1,5 b''
1 """
1 """
2 Test for async helpers.
2 Test for async helpers.
3
3
4 Should only trigger on python 3.5+ or will have syntax errors.
4 Should only trigger on python 3.5+ or will have syntax errors.
5 """
5 """
@@ -9,6 +9,13 b' from textwrap import dedent, indent'
9 from unittest import TestCase
9 from unittest import TestCase
10 from IPython.testing.decorators import skip_without
10 from IPython.testing.decorators import skip_without
11 import sys
11 import sys
12 from typing import TYPE_CHECKING
13
14 if TYPE_CHECKING:
15 from IPython import get_ipython
16
17 ip = get_ipython()
18
12
19
13 iprc = lambda x: ip.run_cell(dedent(x)).raise_error()
20 iprc = lambda x: ip.run_cell(dedent(x)).raise_error()
14 iprc_nr = lambda x: ip.run_cell(dedent(x))
21 iprc_nr = lambda x: ip.run_cell(dedent(x))
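
The `TYPE_CHECKING` import added above exists only for static analyzers; at runtime the test module relies on `get_ipython` being supplied by its environment. A generic sketch of that pattern, with the runtime call kept inside a function so the module itself stays importable outside IPython:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated by mypy/pyright only, never executed at runtime.
        from IPython import get_ipython

    def current_shell():
        # At runtime the name must come from the surrounding environment
        # (IPython's test harness injects get_ipython before this is called).
        return get_ipython()
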
@@ -275,7 +282,7 b' class AsyncTest(TestCase):'
275 await sleep(0.1)
282 await sleep(0.1)
276 """
283 """
277 )
284 )
278
285
279 if sys.version_info < (3,9):
286 if sys.version_info < (3,9):
280 # new pgen parser in 3.9 does not raise MemoryError on too many nested
287 # new pgen parser in 3.9 does not raise MemoryError on too many nested
281 # parens anymore
288 # parens anymore
@@ -138,7 +138,9 b' def _get_inline_config():'
138 from ipykernel.pylab.config import InlineBackend
138 from ipykernel.pylab.config import InlineBackend
139 return InlineBackend.instance()
139 return InlineBackend.instance()
140
140
141 @dec.skip_without('matplotlib')
141
142 @dec.skip_without("ipykernel")
143 @dec.skip_without("matplotlib")
142 def test_set_matplotlib_close():
144 def test_set_matplotlib_close():
143 cfg = _get_inline_config()
145 cfg = _get_inline_config()
144 cfg.close_figures = False
146 cfg.close_figures = False
@@ -173,7 +175,9 b' def test_set_matplotlib_formats():'
173 else:
175 else:
174 nt.assert_not_in(Figure, f)
176 nt.assert_not_in(Figure, f)
175
177
176 @dec.skip_without('matplotlib')
178
179 @dec.skip_without("ipykernel")
180 @dec.skip_without("matplotlib")
177 def test_set_matplotlib_formats_kwargs():
181 def test_set_matplotlib_formats_kwargs():
178 from matplotlib.figure import Figure
182 from matplotlib.figure import Figure
179 ip = get_ipython()
183 ip = get_ipython()
@@ -14,6 +14,7 b' from unittest import TestCase'
14 from unittest import mock
14 from unittest import mock
15 from importlib import invalidate_caches
15 from importlib import invalidate_caches
16 from io import StringIO
16 from io import StringIO
17 from pathlib import Path
17
18
18 import nose.tools as nt
19 import nose.tools as nt
19
20
@@ -831,8 +832,7 b' def test_file():'
831 'line1',
832 'line1',
832 'line2',
833 'line2',
833 ]))
834 ]))
834 with open(fname) as f:
835 s = Path(fname).read_text()
835 s = f.read()
836 nt.assert_in('line1\n', s)
836 nt.assert_in('line1\n', s)
837 nt.assert_in('line2', s)
837 nt.assert_in('line2', s)
838
838
@@ -846,8 +846,7 b' def test_file_single_quote():'
846 'line1',
846 'line1',
847 'line2',
847 'line2',
848 ]))
848 ]))
849 with open(fname) as f:
849 s = Path(fname).read_text()
850 s = f.read()
851 nt.assert_in('line1\n', s)
850 nt.assert_in('line1\n', s)
852 nt.assert_in('line2', s)
851 nt.assert_in('line2', s)
853
852
@@ -861,8 +860,7 b' def test_file_double_quote():'
861 'line1',
860 'line1',
862 'line2',
861 'line2',
863 ]))
862 ]))
864 with open(fname) as f:
863 s = Path(fname).read_text()
865 s = f.read()
866 nt.assert_in('line1\n', s)
864 nt.assert_in('line1\n', s)
867 nt.assert_in('line2', s)
865 nt.assert_in('line2', s)
868
866
@@ -876,8 +874,7 b' def test_file_var_expand():'
876 'line1',
874 'line1',
877 'line2',
875 'line2',
878 ]))
876 ]))
879 with open(fname) as f:
877 s = Path(fname).read_text()
880 s = f.read()
881 nt.assert_in('line1\n', s)
878 nt.assert_in('line1\n', s)
882 nt.assert_in('line2', s)
879 nt.assert_in('line2', s)
883
880
@@ -908,8 +905,7 b' def test_file_amend():'
908 'line3',
905 'line3',
909 'line4',
906 'line4',
910 ]))
907 ]))
911 with open(fname) as f:
908 s = Path(fname).read_text()
912 s = f.read()
913 nt.assert_in('line1\n', s)
909 nt.assert_in('line1\n', s)
914 nt.assert_in('line3\n', s)
910 nt.assert_in('line3\n', s)
915
911
@@ -922,8 +918,7 b' def test_file_spaces():'
922 'line1',
918 'line1',
923 'line2',
919 'line2',
924 ]))
920 ]))
925 with open(fname) as f:
921 s = Path(fname).read_text()
926 s = f.read()
927 nt.assert_in('line1\n', s)
922 nt.assert_in('line1\n', s)
928 nt.assert_in('line2', s)
923 nt.assert_in('line2', s)
929
924
@@ -1063,15 +1058,13 b' def test_save():'
1063 with TemporaryDirectory() as tmpdir:
1058 with TemporaryDirectory() as tmpdir:
1064 file = os.path.join(tmpdir, "testsave.py")
1059 file = os.path.join(tmpdir, "testsave.py")
1065 ip.run_line_magic("save", "%s 1-10" % file)
1060 ip.run_line_magic("save", "%s 1-10" % file)
1066 with open(file) as f:
1061 content = Path(file).read_text()
1067 content = f.read()
1062 nt.assert_equal(content.count(cmds[0]), 1)
1068 nt.assert_equal(content.count(cmds[0]), 1)
1063 nt.assert_in("coding: utf-8", content)
1069 nt.assert_in('coding: utf-8', content)
1070 ip.run_line_magic("save", "-a %s 1-10" % file)
1064 ip.run_line_magic("save", "-a %s 1-10" % file)
1071 with open(file) as f:
1065 content = Path(file).read_text()
1072 content = f.read()
1066 nt.assert_equal(content.count(cmds[0]), 2)
1073 nt.assert_equal(content.count(cmds[0]), 2)
1067 nt.assert_in("coding: utf-8", content)
1074 nt.assert_in('coding: utf-8', content)
1075
1068
1076
1069
1077 def test_store():
1070 def test_store():
@@ -1231,8 +1224,7 b' def test_run_module_from_import_hook():'
1231 "Test that a module can be loaded via an import hook"
1224 "Test that a module can be loaded via an import hook"
1232 with TemporaryDirectory() as tmpdir:
1225 with TemporaryDirectory() as tmpdir:
1233 fullpath = os.path.join(tmpdir, 'my_tmp.py')
1226 fullpath = os.path.join(tmpdir, 'my_tmp.py')
1234 with open(fullpath, 'w') as f:
1227 Path(fullpath).write_text(TEST_MODULE)
1235 f.write(TEST_MODULE)
1236
1228
1237 class MyTempImporter(object):
1229 class MyTempImporter(object):
1238 def __init__(self):
1230 def __init__(self):
@@ -1248,8 +1240,7 b' def test_run_module_from_import_hook():'
1248 return imp.load_source('my_tmp', fullpath)
1240 return imp.load_source('my_tmp', fullpath)
1249
1241
1250 def get_code(self, fullname):
1242 def get_code(self, fullname):
1251 with open(fullpath, 'r') as f:
1243 return compile(Path(fullpath).read_text(), "foo", "exec")
1252 return compile(f.read(), 'foo', 'exec')
1253
1244
1254 def is_package(self, __):
1245 def is_package(self, __):
1255 return False
1246 return False
@@ -699,9 +699,10 b' class VerboseTB(TBTools):'
699
699
700 frames = []
700 frames = []
701 skipped = 0
701 skipped = 0
702 for r in records:
702 lastrecord = len(records) - 1
703 for i, r in enumerate(records):
703 if not isinstance(r, stack_data.RepeatedFrames) and self.skip_hidden:
704 if not isinstance(r, stack_data.RepeatedFrames) and self.skip_hidden:
704 if r.frame.f_locals.get("__tracebackhide__", 0):
705 if r.frame.f_locals.get("__tracebackhide__", 0) and i != lastrecord:
705 skipped += 1
706 skipped += 1
706 continue
707 continue
707 if skipped:
708 if skipped:
@@ -4,7 +4,7 b''
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 import os
7 from pathlib import Path
8
8
9 import nose.tools as nt
9 import nose.tools as nt
10
10
@@ -12,20 +12,22 b' from IPython.utils.syspathcontext import prepended_to_syspath'
12 from IPython.utils.tempdir import TemporaryDirectory
12 from IPython.utils.tempdir import TemporaryDirectory
13 from IPython.lib.deepreload import reload as dreload
13 from IPython.lib.deepreload import reload as dreload
14
14
15
15 def test_deepreload():
16 def test_deepreload():
16 "Test that dreload does deep reloads and skips excluded modules."
17 "Test that dreload does deep reloads and skips excluded modules."
17 with TemporaryDirectory() as tmpdir:
18 with TemporaryDirectory() as tmpdir:
18 with prepended_to_syspath(tmpdir):
19 with prepended_to_syspath(tmpdir):
19 with open(os.path.join(tmpdir, 'A.py'), 'w') as f:
20 tmpdirpath = Path(tmpdir)
21 with open(tmpdirpath / "A.py", "w") as f:
20 f.write("class Object(object):\n pass\n")
22 f.write("class Object(object):\n pass\n")
21 with open(os.path.join(tmpdir, 'B.py'), 'w') as f:
23 with open(tmpdirpath / "B.py", "w") as f:
22 f.write("import A\n")
24 f.write("import A\n")
23 import A
25 import A
24 import B
26 import B
25
27
26 # Test that A is not reloaded.
28 # Test that A is not reloaded.
27 obj = A.Object()
29 obj = A.Object()
28 dreload(B, exclude=['A'])
30 dreload(B, exclude=["A"])
29 nt.assert_true(isinstance(obj, A.Object))
31 nt.assert_true(isinstance(obj, A.Object))
30
32
31 # Test that A is reloaded.
33 # Test that A is reloaded.
@@ -16,7 +16,7 b' Requirements'
16
16
17 The documentation must be built using Python 3.
17 The documentation must be built using Python 3.
18
18
19 In additions to :ref:`devinstall`,
19 In addition to :ref:`devinstall`,
20 the following tools are needed to build the documentation:
20 the following tools are needed to build the documentation:
21
21
22 - sphinx
22 - sphinx
@@ -40,7 +40,7 b' html``.'
40
40
41 ``make html_noapi`` - same as above, but without running the auto-generated API
41 ``make html_noapi`` - same as above, but without running the auto-generated API
42 docs. When you are working on the narrative documentation, the most time
42 docs. When you are working on the narrative documentation, the most time
43 consuming portion of the build process is the processing and rending of the
43 consuming portion of the build process is the processing and rendering of the
44 API documentation. This build target skips that.
44 API documentation. This build target skips that.
45
45
46 ``make pdf`` will compile a pdf from the documentation.
46 ``make pdf`` will compile a pdf from the documentation.
@@ -53,7 +53,7 b' previous docs build.'
53 To remove the previous docs build you can use ``make clean``.
53 To remove the previous docs build you can use ``make clean``.
54 You can also combine ``clean`` with other `make` commands;
54 You can also combine ``clean`` with other `make` commands;
55 for example,
55 for example,
56 ``make clean html`` will do a complete rebuild of the docs or `make clean pdf` will do a complete build of the pdf.
56 ``make clean html`` will do a complete rebuild of the docs or ``make clean pdf`` will do a complete build of the pdf.
57
57
58
58
59 Continuous Integration
59 Continuous Integration
@@ -2,7 +2,7 b''
2
2
3 from os.path import join, dirname, abspath
3 from os.path import join, dirname, abspath
4 import inspect
4 import inspect
5
5 from pathlib import Path
6 from IPython.terminal.ipapp import TerminalIPythonApp
6 from IPython.terminal.ipapp import TerminalIPythonApp
7 from ipykernel.kernelapp import IPKernelApp
7 from ipykernel.kernelapp import IPKernelApp
8 from traitlets import Undefined
8 from traitlets import Undefined
@@ -118,8 +118,7 b' def write_doc(name, title, app, preamble=None):'
118
118
119 if __name__ == '__main__':
119 if __name__ == '__main__':
120 # Touch this file for the make target
120 # Touch this file for the make target
121 with open(generated, 'w'):
121 Path(generated).write_text("")
122 pass
123
122
124 write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
123 write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
125 write_doc('kernel', 'IPython kernel options', IPKernelApp(),
124 write_doc('kernel', 'IPython kernel options', IPKernelApp(),
@@ -1,3 +1,4 b''
1
1 from pathlib import Path
2 from pathlib import Path
2 from IPython.core.alias import Alias
3 from IPython.core.alias import Alias
3 from IPython.core.interactiveshell import InteractiveShell
4 from IPython.core.interactiveshell import InteractiveShell
@@ -61,7 +62,7 b' for name, func in sorted(magics["cell"].items(), key=sortkey):'
61 format_docstring(func),
62 format_docstring(func),
62 ""])
63 ""])
63
64
65
64 src_path = Path(__file__).parent
66 src_path = Path(__file__).parent
65 dest = src_path.joinpath("source", "interactive", "magics-generated.txt")
67 dest = src_path.joinpath("source", "interactive", "magics-generated.txt")
66 with open(dest, "w") as f:
68 dest.write_text("\n".join(output))
67 f.write("\n".join(output))
@@ -10,12 +10,15 b' IPython Sphinx Directive'
10 The IPython Sphinx Directive is in 'beta' and currently under
10 The IPython Sphinx Directive is in 'beta' and currently under
11 active development. Improvements to the code or documentation are welcome!
11 active development. Improvements to the code or documentation are welcome!
12
12
13 The ipython directive is a stateful ipython shell for embedding in
13 .. |rst| replace:: reStructuredText
14 sphinx documents. It knows about standard ipython prompts, and
14
15 extracts the input and output lines. These prompts will be renumbered
15 The :rst:dir:`ipython` directive is a stateful shell that can be used
16 starting at ``1``. The inputs will be fed to an embedded ipython
16 in |rst| files.
17 interpreter and the outputs from that interpreter will be inserted as
17
18 well. For example, code blocks like the following::
18 It knows about standard ipython prompts, and extracts the input and output
19 lines. These prompts will be renumbered starting at ``1``. The inputs will be
20 fed to an embedded ipython interpreter and the outputs from that interpreter
21 will be inserted as well. For example, code blocks like the following::
19
22
20 .. ipython::
23 .. ipython::
21
24
@@ -42,6 +45,48 b' will be rendered as'
42 document that generates the rendered output.
45 document that generates the rendered output.
43
46
44
47
48 Directive and options
49 =====================
50
51 The IPython directive takes a number of options detailed here.
52
53 .. rst:directive:: ipython
54
55 Create an IPython directive.
56
57 .. rst:directive:option:: doctest
58
59 Run a doctest on IPython code blocks in rst.
60
61 .. rst:directive:option:: python
62
63 Used to indicate that the relevant code block does not have IPython prompts.
64
65 .. rst:directive:option:: okexcept
66
67 Allow the code block to raise an exception.
68
69 .. rst:directive:option:: okwarning
70
71 Allow the code block to emit a warning.
72
73 .. rst:directive:option:: suppress
74
75 Silence any warnings or expected errors.
76
77 .. rst:directive:option:: verbatim
78
79 A noop that allows for any text to be syntax highlighted as valid IPython code.
80
81 .. rst:directive:option:: savefig: OUTFILE [IMAGE_OPTIONS]
82
83 Save output from matplotlib to *outfile*.
84
85 It's important to note that all of these options can be used for the entire
86 directive block or they can decorate individual lines of code as explained
87 in :ref:`pseudo-decorators`.
88
89
45 Persisting the Python session across IPython directive blocks
90 Persisting the Python session across IPython directive blocks
46 =============================================================
91 =============================================================
47
92
@@ -393,6 +438,8 b' Pretty much anything you can do with the ipython code, you can do with'
393 with a simple python script. Obviously, though it doesn't make sense
438 with a simple python script. Obviously, though it doesn't make sense
394 to use the doctest option.
439 to use the doctest option.
395
440
441 .. _pseudo-decorators:
442
396 Pseudo-Decorators
443 Pseudo-Decorators
397 =================
444 =================
398
445
@@ -7,26 +7,24 b' whatsnew/development.rst (chronologically ordered), and deletes the snippets.'
7
7
8 import io
8 import io
9 import sys
9 import sys
10 from glob import glob
10 from pathlib import Path
11 from os.path import dirname, basename, abspath, join as pjoin
12 from subprocess import check_call, check_output
11 from subprocess import check_call, check_output
13
12
14 repo_root = dirname(dirname(abspath(__file__)))
13 repo_root = Path(__file__).resolve().parent.parent
15 whatsnew_dir = pjoin(repo_root, 'docs', 'source', 'whatsnew')
14 whatsnew_dir = repo_root / "docs" / "source" / "whatsnew"
16 pr_dir = pjoin(whatsnew_dir, 'pr')
15 pr_dir = whatsnew_dir / "pr"
17 target = pjoin(whatsnew_dir, 'development.rst')
16 target = whatsnew_dir / "development.rst"
18
17
19 FEATURE_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. FEATURE INSERTION POINT."
18 FEATURE_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. FEATURE INSERTION POINT."
20 INCOMPAT_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. INCOMPAT INSERTION POINT."
19 INCOMPAT_MARK = ".. DO NOT EDIT THIS LINE BEFORE RELEASE. INCOMPAT INSERTION POINT."
21
20
22 # 1. Collect the whatsnew snippet files ---------------------------------------
21 # 1. Collect the whatsnew snippet files ---------------------------------------
23
22
24 files = set(glob(pjoin(pr_dir, '*.rst')))
23 files = set(pr_dir.glob("*.rst"))
25 # Ignore explanatory and example files
24 # Ignore explanatory and example files
26 files.difference_update({pjoin(pr_dir, f) for f in {
25 files.difference_update(
27 'incompat-switching-to-perl.rst',
26 {pr_dir / f for f in {"incompat-switching-to-perl.rst", "antigravity-feature.rst"}}
28 'antigravity-feature.rst'}
27 )
29 })
30
28
31 if not files:
29 if not files:
32 print("No automatic update available for what's new")
30 print("No automatic update available for what's new")
@@ -34,30 +32,31 b' if not files:'
34
32
35
33
36 def getmtime(f):
34 def getmtime(f):
37 return check_output(['git', 'log', '-1', '--format="%ai"', '--', f])
35 return check_output(["git", "log", "-1", '--format="%ai"', "--", f])
36
38
37
39 files = sorted(files, key=getmtime)
38 files = sorted(files, key=getmtime)
40
39
41 features, incompats = [], []
40 features, incompats = [], []
42 for path in files:
41 for path in files:
43 with io.open(path, encoding='utf-8') as f:
42 with io.open(path, encoding="utf-8") as f:
44 try:
43 try:
45 content = f.read().rstrip()
44 content = f.read().rstrip()
46 except Exception as e:
45 except Exception as e:
47 raise Exception('Error reading "{}"'.format(f)) from e
46 raise Exception('Error reading "{}"'.format(f)) from e
48
47
49 if basename(path).startswith('incompat-'):
48 if path.name.startswith("incompat-"):
50 incompats.append(content)
49 incompats.append(content)
51 else:
50 else:
52 features.append(content)
51 features.append(content)
53
52
54 # Put the insertion markers back on the end, so they're ready for next time.
53 # Put the insertion markers back on the end, so they're ready for next time.
55 feature_block = '\n\n'.join(features + [FEATURE_MARK])
54 feature_block = "\n\n".join(features + [FEATURE_MARK])
56 incompat_block = '\n\n'.join(incompats + [INCOMPAT_MARK])
55 incompat_block = "\n\n".join(incompats + [INCOMPAT_MARK])
57
56
58 # 2. Update the target file ---------------------------------------------------
57 # 2. Update the target file ---------------------------------------------------
59
58
60 with io.open(target, encoding='utf-8') as f:
59 with io.open(target, encoding="utf-8") as f:
61 content = f.read()
60 content = f.read()
62
61
63 assert content.count(FEATURE_MARK) == 1
62 assert content.count(FEATURE_MARK) == 1
@@ -67,16 +66,16 b' content = content.replace(FEATURE_MARK, feature_block)'
67 content = content.replace(INCOMPAT_MARK, incompat_block)
66 content = content.replace(INCOMPAT_MARK, incompat_block)
68
67
69 # Clean trailing whitespace
68 # Clean trailing whitespace
70 content = '\n'.join(l.rstrip() for l in content.splitlines())
69 content = "\n".join(l.rstrip() for l in content.splitlines())
71
70
72 with io.open(target, 'w', encoding='utf-8') as f:
71 with io.open(target, "w", encoding="utf-8") as f:
73 f.write(content)
72 f.write(content)
74
73
75 # 3. Stage the changes in git -------------------------------------------------
74 # 3. Stage the changes in git -------------------------------------------------
76
75
77 for file in files:
76 for file in files:
78 check_call(['git', 'rm', file])
77 check_call(["git", "rm", file])
79
78
80 check_call(['git', 'add', target])
79 check_call(["git", "add", target])
81
80
82 print("Merged what's new changes. Check the diff and commit the change.")
81 print("Merged what's new changes. Check the diff and commit the change.")