Show More
@@ -1,133 +1,135 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """Script to commit the doc build outputs into the github-pages repo. |
|
2 | """Script to commit the doc build outputs into the github-pages repo. | |
3 |
|
3 | |||
4 | Use: |
|
4 | Use: | |
5 |
|
5 | |||
6 | gh-pages.py [tag] |
|
6 | gh-pages.py [tag] | |
7 |
|
7 | |||
8 | If no tag is given, the current output of 'git describe' is used. If given, |
|
8 | If no tag is given, the current output of 'git describe' is used. If given, | |
9 | that is how the resulting directory will be named. |
|
9 | that is how the resulting directory will be named. | |
10 |
|
10 | |||
11 | In practice, you should use either actual clean tags from a current build or |
|
11 | In practice, you should use either actual clean tags from a current build or | |
12 | something like 'current' as a stable URL for the most current version of the """ |
|
12 | something like 'current' as a stable URL for the most current version of the """ | |
13 |
|
13 | |||
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
|
17 | from __future__ import print_function | |||
|
18 | ||||
17 | import os |
|
19 | import os | |
18 | import shutil |
|
20 | import shutil | |
19 | import sys |
|
21 | import sys | |
20 | from os import chdir as cd |
|
22 | from os import chdir as cd | |
21 | from os.path import join as pjoin |
|
23 | from os.path import join as pjoin | |
22 |
|
24 | |||
23 | from subprocess import Popen, PIPE, CalledProcessError, check_call |
|
25 | from subprocess import Popen, PIPE, CalledProcessError, check_call | |
24 |
|
26 | |||
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------

# Local checkout directory for the published-docs repository.
pages_dir = 'gh-pages'
# Where sphinx leaves the built HTML tree.
html_dir = 'build/html'
# Where sphinx leaves the built PDF (release builds only).
pdf_dir = 'build/latex'
# Remote that serves the docs via GitHub Pages.
pages_repo = 'git@github.com:ipython/ipython-doc.git'
33 |
|
35 | |||
34 | #----------------------------------------------------------------------------- |
|
36 | #----------------------------------------------------------------------------- | |
35 | # Functions |
|
37 | # Functions | |
36 | #----------------------------------------------------------------------------- |
|
38 | #----------------------------------------------------------------------------- | |
37 | def sh(cmd): |
|
39 | def sh(cmd): | |
38 | """Execute command in a subshell, return status code.""" |
|
40 | """Execute command in a subshell, return status code.""" | |
39 | return check_call(cmd, shell=True) |
|
41 | return check_call(cmd, shell=True) | |
40 |
|
42 | |||
41 |
|
43 | |||
def sh2(cmd):
    """Execute command in a subshell, return stdout.

    Stderr is unbuffered from the subshell (it is not captured).

    Raises
    ------
    CalledProcessError
        If the command exits with a non-zero status.
    """
    p = Popen(cmd, stdout=PIPE, shell=True)
    out = p.communicate()[0]
    # Non-zero exit is an error; otherwise hand back stdout minus the
    # trailing newline (no `else` needed after raise).
    if p.returncode:
        raise CalledProcessError(p.returncode, cmd)
    return out.rstrip()
53 |
|
55 | |||
54 |
|
56 | |||
def sh3(cmd):
    """Execute command in a subshell, return (stdout, stderr).

    Both streams are captured and returned with trailing whitespace
    stripped.  NOTE(review): the old docstring said stderr was printed to
    sys.stderr, but the code has always returned it to the caller instead.

    Raises
    ------
    CalledProcessError
        If the command exits with a non-zero status.
    """
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    out, err = p.communicate()
    if p.returncode:
        raise CalledProcessError(p.returncode, cmd)
    return out.rstrip(), err.rstrip()
66 |
|
68 | |||
67 |
|
69 | |||
def init_repo(path):
    """Clone the gh-pages repo into *path* and check out its gh-pages branch.

    Side effects: runs `git clone`; temporarily changes the working
    directory (restored even if the checkout fails).
    """
    sh("git clone %s %s" % (pages_repo, path))
    # os.getcwdu() existed only on Python 2 and was removed in Python 3;
    # os.getcwd() works on both.
    here = os.getcwd()
    cd(path)
    try:
        sh('git checkout gh-pages')
    finally:
        # Always return to the original directory, even on failure.
        cd(here)
75 |
|
77 | |||
#-----------------------------------------------------------------------------
# Script starts
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    # The tag can be given as a positional argument; default to 'dev'.
    try:
        tag = sys.argv[1]
    except IndexError:
        tag = "dev"

    # os.getcwdu() is Python 2 only; os.getcwd() works on both 2 and 3.
    startdir = os.getcwd()
    if not os.path.exists(pages_dir):
        # First run: clone and set up the gh-pages checkout.
        init_repo(pages_dir)
    else:
        # Ensure the existing checkout is up-to-date before operating.
        cd(pages_dir)
        sh('git checkout gh-pages')
        sh('git pull')
        cd(startdir)

    dest = pjoin(pages_dir, tag)

    # don't `make html` here, because gh-pages already depends on html in Makefile
    # sh('make html')
    if tag != 'dev':
        # only build pdf for non-dev targets
        #sh2('make pdf')
        pass

    # This is pretty unforgiving: we unconditionally nuke the destination
    # directory, and then copy the html tree in there
    shutil.rmtree(dest, ignore_errors=True)
    shutil.copytree(html_dir, dest)
    if tag != 'dev':
        #shutil.copy(pjoin(pdf_dir, 'ipython.pdf'), pjoin(dest, 'ipython.pdf'))
        pass

    try:
        cd(pages_dir)
        # sh2 returns bytes on Python 3; decode so the comparison with the
        # str literal 'gh-pages' below works on both 2 and 3.
        branch = sh2('git rev-parse --abbrev-ref HEAD').decode('utf-8',
                                                               'replace').strip()
        if branch != 'gh-pages':
            e = 'On %r, git branch is %r, MUST be "gh-pages"' % (pages_dir,
                                                                 branch)
            raise RuntimeError(e)

        sh('git add -A %s' % tag)
        sh('git commit -m"Updated doc release: %s"' % tag)
        print()
        print('Most recent 3 commits:')
        sys.stdout.flush()
        sh('git --no-pager log --oneline HEAD~3..')
    finally:
        # Whatever happens above, leave the caller back where they started.
        cd(startdir)

    print()
    print('Now verify the build in: %r' % dest)
    print("If everything looks good, 'git push'")
# -*- coding: utf-8 -*-
#
# IPython documentation build configuration file.

# NOTE: This file has been edited manually from the auto-generated one from
# sphinx.  Do NOT delete and re-generate.  If any changes from sphinx are
# needed, generate a scratch one and merge by hand any new fields needed.

#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.

import sys, os

ON_RTD = os.environ.get('READTHEDOCS', None) == 'True'

if ON_RTD:
    # Mock the presence of matplotlib, which we don't have on RTD
    # see
    # http://read-the-docs.readthedocs.org/en/latest/faq.html
    tags.add('rtd')

# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('../sphinxext'))

# We load the ipython release info into a dict by explicit execution.
# NOTE: execfile() was removed in Python 3; this exec(compile(...)) form
# behaves the same and works on both Python 2 and 3.
iprelease = {}
_release_file = '../../IPython/core/release.py'
with open(_release_file) as f:
    exec(compile(f.read(), _release_file, 'exec'), iprelease)
del _release_file

# General configuration
# ---------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'matplotlib.sphinxext.mathmpl',
    'matplotlib.sphinxext.only_directives',
    'matplotlib.sphinxext.plot_directive',
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.doctest',
    'sphinx.ext.inheritance_diagram',
    'sphinx.ext.intersphinx',
    'IPython.sphinxext.ipython_console_highlighting',
    'IPython.sphinxext.ipython_directive',
    'numpydoc',  # to preprocess docstrings
    'github',  # for easy GitHub links
    'magics',
]

if ON_RTD:
    # Remove extensions not currently supported on RTD
    extensions.remove('matplotlib.sphinxext.only_directives')
    extensions.remove('matplotlib.sphinxext.mathmpl')
    extensions.remove('matplotlib.sphinxext.plot_directive')
    extensions.remove('IPython.sphinxext.ipython_directive')
    extensions.remove('IPython.sphinxext.ipython_console_highlighting')

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

if iprelease['_version_extra'] == 'dev':
    rst_prolog = """
.. note::

    This documentation is for a development version of IPython. There may be
    significant differences from the latest stable release.

"""

# The master toctree document.
master_doc = 'index'

# General substitutions.
project = 'IPython'
copyright = 'The IPython Development Team'

# ghissue config
github_project_url = "https://github.com/ipython/ipython"

# numpydoc config
numpydoc_show_class_members = False  # Otherwise Sphinx emits thousands of warnings
numpydoc_class_members_toctree = False

# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The full version, including alpha/beta/rc tags.
release = "%s" % iprelease['version']
# Just the X.Y.Z part, no '-dev'
version = iprelease['version'].split('-', 1)[0]


# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# Exclude these glob-style patterns when looking for source files. They are
# relative to the source/ directory.
exclude_patterns = ['whatsnew/pr']


# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# Set the default role so we can use `foo` instead of ``foo``
default_role = 'literal'

# Options for HTML output
# -----------------------

# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {
    'interactive/htmlnotebook': 'notebook_redirect.html',
    'interactive/notebook': 'notebook_redirect.html',
    'interactive/nbconvert': 'notebook_redirect.html',
    'interactive/public_server': 'notebook_redirect.html',
}

# If false, no module index is generated.
#html_use_modindex = True

# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'ipythondoc'

intersphinx_mapping = {'python': ('http://docs.python.org/2/', None),
                       'rpy2': ('http://rpy.sourceforge.net/rpy2/doc-2.4/html/', None)}

# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '11pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).

latex_documents = [
    ('index', 'ipython.tex', 'IPython Documentation',
     u"""The IPython Development Team""", 'manual', True),
    ('parallel/winhpc_index', 'winhpc_whitepaper.tex',
     'Using IPython on Windows HPC Server 2008',
     u"Brian E. Granger", 'manual', True)
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
latex_use_modindex = True


# Options for texinfo output
# --------------------------

texinfo_documents = [
    (master_doc, 'ipython', 'IPython Documentation',
     'The IPython Development Team',
     'IPython',
     'IPython Documentation',
     'Programming',
     1),
]

modindex_common_prefix = ['IPython.']


# Cleanup
# -------
# delete release info to avoid pickling errors from sphinx

del iprelease
@@ -1,87 +1,88 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """ |
|
2 | """ | |
3 | Script for setting up and using [all]reduce with a binary-tree engine interconnect. |
|
3 | Script for setting up and using [all]reduce with a binary-tree engine interconnect. | |
4 |
|
4 | |||
5 | usage: `python bintree_script.py` |
|
5 | usage: `python bintree_script.py` | |
6 |
|
6 | |||
7 | This spanning tree strategy ensures that a single node mailbox will never 

7 | This spanning tree strategy ensures that a single node mailbox will never | 
8 | receive more than 2 messages at once. This is very important to scale to large 

8 | receive more than 2 messages at once. This is very important to scale to large | 
9 | clusters (e.g. 1000 nodes) since if you have many incoming messages of a couple |
|
9 | clusters (e.g. 1000 nodes) since if you have many incoming messages of a couple | |
10 | of megabytes you might saturate the network interface of a single node and |
|
10 | of megabytes you might saturate the network interface of a single node and | |
11 | potentially its memory buffers if the messages are not consumed in a streamed |
|
11 | potentially its memory buffers if the messages are not consumed in a streamed | |
12 | manner. |
|
12 | manner. | |
13 |
|
13 | |||
14 | Note that the AllReduce scheme implemented with the spanning tree strategy |
|
14 | Note that the AllReduce scheme implemented with the spanning tree strategy | |
15 | imposes the aggregation function to be commutative and distributive. It might 

15 | imposes the aggregation function to be commutative and distributive. It might | 
16 | not be the case if you implement the naive gather / reduce / broadcast strategy |
|
16 | not be the case if you implement the naive gather / reduce / broadcast strategy | |
17 | where you can reorder the partial data before performing the reduce. |
|
17 | where you can reorder the partial data before performing the reduce. | |
18 | """ |
|
18 | """ | |
|
19 | from __future__ import print_function | |||
19 |
|
20 | |||
20 | from IPython.parallel import Client, Reference |
|
21 | from IPython.parallel import Client, Reference | |
21 |
|
22 | |||
22 |
|
23 | |||
23 | # connect client and create views |
|
24 | # connect client and create views | |
24 | rc = Client() |
|
25 | rc = Client() | |
25 | rc.block=True |
|
26 | rc.block=True | |
26 | ids = rc.ids |
|
27 | ids = rc.ids | |
27 |
|
28 | |||
28 | root_id = ids[0] |
|
29 | root_id = ids[0] | |
29 | root = rc[root_id] |
|
30 | root = rc[root_id] | |
30 |
|
31 | |||
31 | view = rc[:] |
|
32 | view = rc[:] | |
32 |
|
33 | |||
33 | # run bintree.py script defining bintree functions, etc. |
|
34 | # run bintree.py script defining bintree functions, etc. | |
34 | execfile('bintree.py') |
|
35 | exec(compile(open('bintree.py').read(), 'bintree.py', 'exec')) | |
35 |
|
36 | |||
36 | # generate binary tree of parents |
|
37 | # generate binary tree of parents | |
37 | btree = bintree(ids) |
|
38 | btree = bintree(ids) | |
38 |
|
39 | |||
39 |
print |
|
40 | print("setting up binary tree interconnect:") | |
40 | print_bintree(btree) |
|
41 | print_bintree(btree) | |
41 |
|
42 | |||
42 | view.run('bintree.py') |
|
43 | view.run('bintree.py') | |
43 | view.scatter('id', ids, flatten=True) |
|
44 | view.scatter('id', ids, flatten=True) | |
44 | view['root_id'] = root_id |
|
45 | view['root_id'] = root_id | |
45 |
|
46 | |||
46 | # create the Communicator objects on the engines |
|
47 | # create the Communicator objects on the engines | |
47 | view.execute('com = BinaryTreeCommunicator(id, root = id==root_id )') |
|
48 | view.execute('com = BinaryTreeCommunicator(id, root = id==root_id )') | |
48 | pub_url = root.apply_sync(lambda : com.pub_url) |
|
49 | pub_url = root.apply_sync(lambda : com.pub_url) | |
49 |
|
50 | |||
50 | # gather the connection information into a dict |
|
51 | # gather the connection information into a dict | |
51 | ar = view.apply_async(lambda : com.info) |
|
52 | ar = view.apply_async(lambda : com.info) | |
52 | peers = ar.get_dict() |
|
53 | peers = ar.get_dict() | |
53 | # this is a dict, keyed by engine ID, of the connection info for the EngineCommunicators |
|
54 | # this is a dict, keyed by engine ID, of the connection info for the EngineCommunicators | |
54 |
|
55 | |||
55 | # connect the engines to each other: |
|
56 | # connect the engines to each other: | |
56 | def connect(com, peers, tree, pub_url, root_id): |
|
57 | def connect(com, peers, tree, pub_url, root_id): | |
57 | """this function will be called on the engines""" |
|
58 | """this function will be called on the engines""" | |
58 | com.connect(peers, tree, pub_url, root_id) |
|
59 | com.connect(peers, tree, pub_url, root_id) | |
59 |
|
60 | |||
60 | view.apply_sync(connect, Reference('com'), peers, btree, pub_url, root_id) |
|
61 | view.apply_sync(connect, Reference('com'), peers, btree, pub_url, root_id) | |
61 |
|
62 | |||
62 | # functions that can be used for reductions |
|
63 | # functions that can be used for reductions | |
63 | # max and min builtins can be used as well |
|
64 | # max and min builtins can be used as well | |
64 | def add(a,b): |
|
65 | def add(a,b): | |
65 | """cumulative sum reduction""" |
|
66 | """cumulative sum reduction""" | |
66 | return a+b |
|
67 | return a+b | |
67 |
|
68 | |||
68 | def mul(a,b): |
|
69 | def mul(a,b): | |
69 | """cumulative product reduction""" |
|
70 | """cumulative product reduction""" | |
70 | return a*b |
|
71 | return a*b | |
71 |
|
72 | |||
72 | view['add'] = add |
|
73 | view['add'] = add | |
73 | view['mul'] = mul |
|
74 | view['mul'] = mul | |
74 |
|
75 | |||
75 | # scatter some data |
|
76 | # scatter some data | |
76 | data = range(1000) |
|
77 | data = list(range(1000)) | |
77 | view.scatter('data', data) |
|
78 | view.scatter('data', data) | |
78 |
|
79 | |||
79 | # perform cumulative sum via allreduce |
|
80 | # perform cumulative sum via allreduce | |
80 | view.execute("data_sum = com.allreduce(add, data, flat=False)") |
|
81 | view.execute("data_sum = com.allreduce(add, data, flat=False)") | |
81 |
print |
|
82 | print("allreduce sum of data on all engines:", view['data_sum']) | |
82 |
|
83 | |||
83 | # perform cumulative sum *without* final broadcast |
|
84 | # perform cumulative sum *without* final broadcast | |
84 | # when not broadcasting with allreduce, the final result resides on the root node: |
|
85 | # when not broadcasting with allreduce, the final result resides on the root node: | |
85 | view.execute("ids_sum = com.reduce(add, id, flat=True)") |
|
86 | view.execute("ids_sum = com.reduce(add, id, flat=True)") | |
86 |
print |
|
87 | print("reduce sum of engine ids (not broadcast):", root['ids_sum']) | |
87 |
print |
|
88 | print("partial result on each engine:", view['ids_sum']) |
@@ -1,205 +1,205 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """ |
|
2 | """ | |
3 | A simple python program of solving a 2D wave equation in parallel. |
|
3 | A simple python program of solving a 2D wave equation in parallel. | |
4 | Domain partitioning and inter-processor communication |
|
4 | Domain partitioning and inter-processor communication | |
5 | are done by an object of class MPIRectPartitioner2D |
|
5 | are done by an object of class MPIRectPartitioner2D | |
6 | (which is a subclass of RectPartitioner2D and uses MPI via mpi4py) |
|
6 | (which is a subclass of RectPartitioner2D and uses MPI via mpi4py) | |
7 |
|
7 | |||
8 | An example of running the program is (8 processors, 4x2 partition, |
|
8 | An example of running the program is (8 processors, 4x2 partition, | |
9 | 400x100 grid cells):: |
|
9 | 400x100 grid cells):: | |
10 |
|
10 | |||
11 | $ ipcluster start --engines=MPIExec -n 8 # start 8 engines with mpiexec |
|
11 | $ ipcluster start --engines=MPIExec -n 8 # start 8 engines with mpiexec | |
12 | $ python parallelwave-mpi.py --grid 400 100 --partition 4 2 |
|
12 | $ python parallelwave-mpi.py --grid 400 100 --partition 4 2 | |
13 |
|
13 | |||
14 | See also parallelwave-mpi, which runs the same program, but uses MPI |
|
14 | See also parallelwave-mpi, which runs the same program, but uses MPI | |
15 | (via mpi4py) for the inter-engine communication. |
|
15 | (via mpi4py) for the inter-engine communication. | |
16 |
|
16 | |||
17 | Authors |
|
17 | Authors | |
18 | ------- |
|
18 | ------- | |
19 |
|
19 | |||
20 | * Xing Cai |
|
20 | * Xing Cai | |
21 | * Min Ragan-Kelley |
|
21 | * Min Ragan-Kelley | |
22 |
|
22 | |||
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import sys |
|
25 | import sys | |
26 | import time |
|
26 | import time | |
27 |
|
27 | |||
28 | from numpy import exp, zeros, newaxis, sqrt |
|
28 | from numpy import exp, zeros, newaxis, sqrt | |
29 |
|
29 | |||
30 | from IPython.external import argparse |
|
30 | from IPython.external import argparse | |
31 | from IPython.parallel import Client, Reference |
|
31 | from IPython.parallel import Client, Reference | |
32 |
|
32 | |||
33 | def setup_partitioner(index, num_procs, gnum_cells, parts): |
|
33 | def setup_partitioner(index, num_procs, gnum_cells, parts): | |
34 | """create a partitioner in the engine namespace""" |
|
34 | """create a partitioner in the engine namespace""" | |
35 | global partitioner |
|
35 | global partitioner | |
36 | p = MPIRectPartitioner2D(my_id=index, num_procs=num_procs) |
|
36 | p = MPIRectPartitioner2D(my_id=index, num_procs=num_procs) | |
37 | p.redim(global_num_cells=gnum_cells, num_parts=parts) |
|
37 | p.redim(global_num_cells=gnum_cells, num_parts=parts) | |
38 | p.prepare_communication() |
|
38 | p.prepare_communication() | |
39 | # put the partitioner into the global namespace: |
|
39 | # put the partitioner into the global namespace: | |
40 | partitioner=p |
|
40 | partitioner=p | |
41 |
|
41 | |||
42 | def setup_solver(*args, **kwargs): |
|
42 | def setup_solver(*args, **kwargs): | |
43 | """create a WaveSolver in the engine namespace""" |
|
43 | """create a WaveSolver in the engine namespace""" | |
44 | global solver |
|
44 | global solver | |
45 | solver = WaveSolver(*args, **kwargs) |
|
45 | solver = WaveSolver(*args, **kwargs) | |
46 |
|
46 | |||
47 | def wave_saver(u, x, y, t): |
|
47 | def wave_saver(u, x, y, t): | |
48 | """save the wave log""" |
|
48 | """save the wave log""" | |
49 | global u_hist |
|
49 | global u_hist | |
50 | global t_hist |
|
50 | global t_hist | |
51 | t_hist.append(t) |
|
51 | t_hist.append(t) | |
52 | u_hist.append(1.0*u) |
|
52 | u_hist.append(1.0*u) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | # main program: |
|
55 | # main program: | |
56 | if __name__ == '__main__': |
|
56 | if __name__ == '__main__': | |
57 |
|
57 | |||
58 | parser = argparse.ArgumentParser() |
|
58 | parser = argparse.ArgumentParser() | |
59 | paa = parser.add_argument |
|
59 | paa = parser.add_argument | |
60 | paa('--grid', '-g', |
|
60 | paa('--grid', '-g', | |
61 | type=int, nargs=2, default=[100,100], dest='grid', |
|
61 | type=int, nargs=2, default=[100,100], dest='grid', | |
62 | help="Cells in the grid, e.g. --grid 100 200") |
|
62 | help="Cells in the grid, e.g. --grid 100 200") | |
63 | paa('--partition', '-p', |
|
63 | paa('--partition', '-p', | |
64 | type=int, nargs=2, default=None, |
|
64 | type=int, nargs=2, default=None, | |
65 | help="Process partition grid, e.g. --partition 4 2 for 4x2") |
|
65 | help="Process partition grid, e.g. --partition 4 2 for 4x2") | |
66 | paa('-c', |
|
66 | paa('-c', | |
67 | type=float, default=1., |
|
67 | type=float, default=1., | |
68 | help="Wave speed (I think)") |
|
68 | help="Wave speed (I think)") | |
69 | paa('-Ly', |
|
69 | paa('-Ly', | |
70 | type=float, default=1., |
|
70 | type=float, default=1., | |
71 | help="system size (in y)") |
|
71 | help="system size (in y)") | |
72 | paa('-Lx', |
|
72 | paa('-Lx', | |
73 | type=float, default=1., |
|
73 | type=float, default=1., | |
74 | help="system size (in x)") |
|
74 | help="system size (in x)") | |
75 | paa('-t', '--tstop', |
|
75 | paa('-t', '--tstop', | |
76 | type=float, default=1., |
|
76 | type=float, default=1., | |
77 | help="Time units to run") |
|
77 | help="Time units to run") | |
78 | paa('--profile', |
|
78 | paa('--profile', | |
79 | type=unicode, default=u'default', |
|
79 | type=unicode, default=u'default', | |
80 | help="Specify the ipcluster profile for the client to connect to.") |
|
80 | help="Specify the ipcluster profile for the client to connect to.") | |
81 | paa('--save', |
|
81 | paa('--save', | |
82 | action='store_true', |
|
82 | action='store_true', | |
83 | help="Add this flag to save the time/wave history during the run.") |
|
83 | help="Add this flag to save the time/wave history during the run.") | |
84 | paa('--scalar', |
|
84 | paa('--scalar', | |
85 | action='store_true', |
|
85 | action='store_true', | |
86 | help="Also run with scalar interior implementation, to see vector speedup.") |
|
86 | help="Also run with scalar interior implementation, to see vector speedup.") | |
87 |
|
87 | |||
88 | ns = parser.parse_args() |
|
88 | ns = parser.parse_args() | |
89 | # set up arguments |
|
89 | # set up arguments | |
90 | grid = ns.grid |
|
90 | grid = ns.grid | |
91 | partition = ns.partition |
|
91 | partition = ns.partition | |
92 | Lx = ns.Lx |
|
92 | Lx = ns.Lx | |
93 | Ly = ns.Ly |
|
93 | Ly = ns.Ly | |
94 | c = ns.c |
|
94 | c = ns.c | |
95 | tstop = ns.tstop |
|
95 | tstop = ns.tstop | |
96 | if ns.save: |
|
96 | if ns.save: | |
97 | user_action = wave_saver |
|
97 | user_action = wave_saver | |
98 | else: |
|
98 | else: | |
99 | user_action = None |
|
99 | user_action = None | |
100 |
|
100 | |||
101 | num_cells = 1.0*(grid[0]-1)*(grid[1]-1) |
|
101 | num_cells = 1.0*(grid[0]-1)*(grid[1]-1) | |
102 | final_test = True |
|
102 | final_test = True | |
103 |
|
103 | |||
104 | # create the Client |
|
104 | # create the Client | |
105 | rc = Client(profile=ns.profile) |
|
105 | rc = Client(profile=ns.profile) | |
106 | num_procs = len(rc.ids) |
|
106 | num_procs = len(rc.ids) | |
107 |
|
107 | |||
108 | if partition is None: |
|
108 | if partition is None: | |
109 | partition = [1,num_procs] |
|
109 | partition = [1,num_procs] | |
110 |
|
110 | |||
111 | assert partition[0]*partition[1] == num_procs, "can't map partition %s to %i engines"%(partition, num_procs) |
|
111 | assert partition[0]*partition[1] == num_procs, "can't map partition %s to %i engines"%(partition, num_procs) | |
112 |
|
112 | |||
113 | view = rc[:] |
|
113 | view = rc[:] | |
114 |
print |
|
114 | print("Running %s system on %s processes until %f" % (grid, partition, tstop)) | |
115 |
|
115 | |||
116 | # functions defining initial/boundary/source conditions |
|
116 | # functions defining initial/boundary/source conditions | |
117 | def I(x,y): |
|
117 | def I(x,y): | |
118 | from numpy import exp |
|
118 | from numpy import exp | |
119 | return 1.5*exp(-100*((x-0.5)**2+(y-0.5)**2)) |
|
119 | return 1.5*exp(-100*((x-0.5)**2+(y-0.5)**2)) | |
120 | def f(x,y,t): |
|
120 | def f(x,y,t): | |
121 | return 0.0 |
|
121 | return 0.0 | |
122 | # from numpy import exp,sin |
|
122 | # from numpy import exp,sin | |
123 | # return 10*exp(-(x - sin(100*t))**2) |
|
123 | # return 10*exp(-(x - sin(100*t))**2) | |
124 | def bc(x,y,t): |
|
124 | def bc(x,y,t): | |
125 | return 0.0 |
|
125 | return 0.0 | |
126 |
|
126 | |||
127 | # initial imports, setup rank |
|
127 | # initial imports, setup rank | |
128 | view.execute('\n'.join([ |
|
128 | view.execute('\n'.join([ | |
129 | "from mpi4py import MPI", |
|
129 | "from mpi4py import MPI", | |
130 | "import numpy", |
|
130 | "import numpy", | |
131 | "mpi = MPI.COMM_WORLD", |
|
131 | "mpi = MPI.COMM_WORLD", | |
132 | "my_id = MPI.COMM_WORLD.Get_rank()"]), block=True) |
|
132 | "my_id = MPI.COMM_WORLD.Get_rank()"]), block=True) | |
133 |
|
133 | |||
134 | # initialize t_hist/u_hist for saving the state at each step (optional) |
|
134 | # initialize t_hist/u_hist for saving the state at each step (optional) | |
135 | view['t_hist'] = [] |
|
135 | view['t_hist'] = [] | |
136 | view['u_hist'] = [] |
|
136 | view['u_hist'] = [] | |
137 |
|
137 | |||
138 | # set vector/scalar implementation details |
|
138 | # set vector/scalar implementation details | |
139 | impl = {} |
|
139 | impl = {} | |
140 | impl['ic'] = 'vectorized' |
|
140 | impl['ic'] = 'vectorized' | |
141 | impl['inner'] = 'scalar' |
|
141 | impl['inner'] = 'scalar' | |
142 | impl['bc'] = 'vectorized' |
|
142 | impl['bc'] = 'vectorized' | |
143 |
|
143 | |||
144 | # execute some files so that the classes we need will be defined on the engines: |
|
144 | # execute some files so that the classes we need will be defined on the engines: | |
145 | view.run('RectPartitioner.py') |
|
145 | view.run('RectPartitioner.py') | |
146 | view.run('wavesolver.py') |
|
146 | view.run('wavesolver.py') | |
147 |
|
147 | |||
148 | # setup remote partitioner |
|
148 | # setup remote partitioner | |
149 | # note that Reference means that the argument passed to setup_partitioner will be the |
|
149 | # note that Reference means that the argument passed to setup_partitioner will be the | |
150 | # object named 'my_id' in the engine's namespace |
|
150 | # object named 'my_id' in the engine's namespace | |
151 | view.apply_sync(setup_partitioner, Reference('my_id'), num_procs, grid, partition) |
|
151 | view.apply_sync(setup_partitioner, Reference('my_id'), num_procs, grid, partition) | |
152 | # wait for initial communication to complete |
|
152 | # wait for initial communication to complete | |
153 | view.execute('mpi.barrier()') |
|
153 | view.execute('mpi.barrier()') | |
154 | # setup remote solvers |
|
154 | # setup remote solvers | |
155 | view.apply_sync(setup_solver, I,f,c,bc,Lx,Ly,partitioner=Reference('partitioner'), dt=0,implementation=impl) |
|
155 | view.apply_sync(setup_solver, I,f,c,bc,Lx,Ly,partitioner=Reference('partitioner'), dt=0,implementation=impl) | |
156 |
|
156 | |||
157 | # lambda for calling solver.solve: |
|
157 | # lambda for calling solver.solve: | |
158 | _solve = lambda *args, **kwargs: solver.solve(*args, **kwargs) |
|
158 | _solve = lambda *args, **kwargs: solver.solve(*args, **kwargs) | |
159 |
|
159 | |||
160 | if ns.scalar: |
|
160 | if ns.scalar: | |
161 | impl['inner'] = 'scalar' |
|
161 | impl['inner'] = 'scalar' | |
162 | # run first with element-wise Python operations for each cell |
|
162 | # run first with element-wise Python operations for each cell | |
163 | t0 = time.time() |
|
163 | t0 = time.time() | |
164 | ar = view.apply_async(_solve, tstop, dt=0, verbose=True, final_test=final_test, user_action=user_action) |
|
164 | ar = view.apply_async(_solve, tstop, dt=0, verbose=True, final_test=final_test, user_action=user_action) | |
165 | if final_test: |
|
165 | if final_test: | |
166 | # this sum is performed element-wise as results finish |
|
166 | # this sum is performed element-wise as results finish | |
167 | s = sum(ar) |
|
167 | s = sum(ar) | |
168 | # the L2 norm (RMS) of the result: |
|
168 | # the L2 norm (RMS) of the result: | |
169 | norm = sqrt(s/num_cells) |
|
169 | norm = sqrt(s/num_cells) | |
170 | else: |
|
170 | else: | |
171 | norm = -1 |
|
171 | norm = -1 | |
172 | t1 = time.time() |
|
172 | t1 = time.time() | |
173 |
print |
|
173 | print('scalar inner-version, Wtime=%g, norm=%g' % (t1-t0, norm)) | |
174 |
|
174 | |||
175 | impl['inner'] = 'vectorized' |
|
175 | impl['inner'] = 'vectorized' | |
176 | # setup new solvers |
|
176 | # setup new solvers | |
177 | view.apply_sync(setup_solver, I,f,c,bc,Lx,Ly,partitioner=Reference('partitioner'), dt=0,implementation=impl) |
|
177 | view.apply_sync(setup_solver, I,f,c,bc,Lx,Ly,partitioner=Reference('partitioner'), dt=0,implementation=impl) | |
178 | view.execute('mpi.barrier()') |
|
178 | view.execute('mpi.barrier()') | |
179 |
|
179 | |||
180 | # run again with numpy vectorized inner-implementation |
|
180 | # run again with numpy vectorized inner-implementation | |
181 | t0 = time.time() |
|
181 | t0 = time.time() | |
182 | ar = view.apply_async(_solve, tstop, dt=0, verbose=True, final_test=final_test, user_action=user_action) |
|
182 | ar = view.apply_async(_solve, tstop, dt=0, verbose=True, final_test=final_test, user_action=user_action) | |
183 | if final_test: |
|
183 | if final_test: | |
184 | # this sum is performed element-wise as results finish |
|
184 | # this sum is performed element-wise as results finish | |
185 | s = sum(ar) |
|
185 | s = sum(ar) | |
186 | # the L2 norm (RMS) of the result: |
|
186 | # the L2 norm (RMS) of the result: | |
187 | norm = sqrt(s/num_cells) |
|
187 | norm = sqrt(s/num_cells) | |
188 | else: |
|
188 | else: | |
189 | norm = -1 |
|
189 | norm = -1 | |
190 | t1 = time.time() |
|
190 | t1 = time.time() | |
191 |
print |
|
191 | print('vector inner-version, Wtime=%g, norm=%g' % (t1-t0, norm)) | |
192 |
|
192 | |||
193 | # if ns.save is True, then u_hist stores the history of u as a list |
|
193 | # if ns.save is True, then u_hist stores the history of u as a list | |
194 | # If the partition scheme is Nx1, then u can be reconstructed via 'gather': 

194 | # If the partition scheme is Nx1, then u can be reconstructed via 'gather': | 
195 | if ns.save and partition[-1] == 1: |
|
195 | if ns.save and partition[-1] == 1: | |
196 | import matplotlib.pyplot as plt |
|
196 | import matplotlib.pyplot as plt | |
197 | view.execute('u_last=u_hist[-1]') |
|
197 | view.execute('u_last=u_hist[-1]') | |
198 | # map mpi IDs to IPython IDs, which may not match |
|
198 | # map mpi IDs to IPython IDs, which may not match | |
199 | ranks = view['my_id'] |
|
199 | ranks = view['my_id'] | |
200 | targets = range(len(ranks)) |
|
200 | targets = range(len(ranks)) | |
201 | for idx in range(len(ranks)): |
|
201 | for idx in range(len(ranks)): | |
202 | targets[idx] = ranks.index(idx) |
|
202 | targets[idx] = ranks.index(idx) | |
203 | u_last = rc[targets].gather('u_last', block=True) |
|
203 | u_last = rc[targets].gather('u_last', block=True) | |
204 | plt.pcolor(u_last) |
|
204 | plt.pcolor(u_last) | |
205 | plt.show() |
|
205 | plt.show() |
@@ -1,28 +1,28 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """IPython release build script. |
|
2 | """IPython release build script. | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | import os |
|
5 | import os | |
6 | from shutil import rmtree |
|
6 | from shutil import rmtree | |
7 |
|
7 | |||
8 | from toollib import * |
|
8 | from toollib import * | |
9 |
|
9 | |||
10 | # Get main ipython dir, this will raise if it doesn't pass some checks |
|
10 | # Get main ipython dir, this will raise if it doesn't pass some checks | |
11 | ipdir = get_ipdir() |
|
11 | ipdir = get_ipdir() | |
12 | cd(ipdir) |
|
12 | cd(ipdir) | |
13 |
|
13 | |||
14 | # Load release info |
|
14 | # Load release info | |
15 | execfile(pjoin('IPython', 'core', 'release.py')) |
|
15 | execfile(pjoin('IPython', 'core', 'release.py'), globals()) | |
16 |
|
16 | |||
17 | # Check that everything compiles |
|
17 | # Check that everything compiles | |
18 | compile_tree() |
|
18 | compile_tree() | |
19 |
|
19 | |||
20 | # Cleanup |
|
20 | # Cleanup | |
21 | for d in ['build', 'dist', pjoin('docs', 'build'), pjoin('docs', 'dist'), |
|
21 | for d in ['build', 'dist', pjoin('docs', 'build'), pjoin('docs', 'dist'), | |
22 | pjoin('docs', 'source', 'api', 'generated')]: |
|
22 | pjoin('docs', 'source', 'api', 'generated')]: | |
23 | if os.path.isdir(d): |
|
23 | if os.path.isdir(d): | |
24 | rmtree(d) |
|
24 | rmtree(d) | |
25 |
|
25 | |||
26 | # Build source and binary distros |
|
26 | # Build source and binary distros | |
27 | sh(sdists) |
|
27 | sh(sdists) | |
28 | sh(wheels) |
|
28 | sh(wheels) |
@@ -1,54 +1,55 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """Utility to look for hard tabs and \r characters in all sources. |
|
2 | """Utility to look for hard tabs and \r characters in all sources. | |
3 |
|
3 | |||
4 | Usage: |
|
4 | Usage: | |
5 |
|
5 | |||
6 | ./check_sources.py |
|
6 | ./check_sources.py | |
7 |
|
7 | |||
8 | It prints summaries and if chosen, line-by-line info of where \\t or \\r |
|
8 | It prints summaries and if chosen, line-by-line info of where \\t or \\r | |
9 | characters can be found in our source tree. |
|
9 | characters can be found in our source tree. | |
10 | """ |
|
10 | """ | |
|
11 | from __future__ import print_function | |||
11 |
|
12 | |||
12 | # Config |
|
13 | # Config | |
13 | # If true, all lines that have tabs are printed, with line number |
|
14 | # If true, all lines that have tabs are printed, with line number | |
14 | full_report_tabs = True |
|
15 | full_report_tabs = True | |
15 | # If true, all lines that have tabs are printed, with line number |
|
16 | # If true, all lines that have tabs are printed, with line number | |
16 | full_report_rets = False |
|
17 | full_report_rets = False | |
17 |
|
18 | |||
18 | # Code begins |
|
19 | # Code begins | |
19 | from IPython.external.path import path |
|
20 | from IPython.external.path import path | |
20 |
|
21 | |||
21 | rets = [] |
|
22 | rets = [] | |
22 | tabs = [] |
|
23 | tabs = [] | |
23 |
|
24 | |||
24 | for f in path('..').walkfiles('*.py'): |
|
25 | for f in path('..').walkfiles('*.py'): | |
25 | errs = '' |
|
26 | errs = '' | |
26 | cont = f.bytes() |
|
27 | cont = f.bytes() | |
27 | if '\t' in cont: |
|
28 | if '\t' in cont: | |
28 | errs+='t' |
|
29 | errs+='t' | |
29 | tabs.append(f) |
|
30 | tabs.append(f) | |
30 |
|
31 | |||
31 | if '\r' in cont: |
|
32 | if '\r' in cont: | |
32 | errs+='r' |
|
33 | errs+='r' | |
33 | rets.append(f) |
|
34 | rets.append(f) | |
34 |
|
35 | |||
35 | if errs: |
|
36 | if errs: | |
36 |
print |
|
37 | print("%3s" % errs, f) | |
37 |
|
38 | |||
38 | if 't' in errs and full_report_tabs: |
|
39 | if 't' in errs and full_report_tabs: | |
39 | for ln,line in enumerate(f.lines()): |
|
40 | for ln,line in enumerate(f.lines()): | |
40 | if '\t' in line: |
|
41 | if '\t' in line: | |
41 |
print |
|
42 | print('TAB:',ln,':',line, end=' ') | |
42 |
|
43 | |||
43 | if 'r' in errs and full_report_rets: |
|
44 | if 'r' in errs and full_report_rets: | |
44 | for ln,line in enumerate(open(f.abspath(),'rb')): |
|
45 | for ln,line in enumerate(open(f.abspath(),'rb')): | |
45 | if '\r' in line: |
|
46 | if '\r' in line: | |
46 |
print |
|
47 | print('RET:',ln,':',line, end=' ') | |
47 |
|
48 | |||
48 | # Summary at the end, to call cleanup tools if necessary |
|
49 | # Summary at the end, to call cleanup tools if necessary | |
49 | if tabs: |
|
50 | if tabs: | |
50 |
print |
|
51 | print('Hard tabs found. These can be cleaned with untabify:') | |
51 |
for f in tabs: print |
|
52 | for f in tabs: print(f, end=' ') | |
52 | if rets: |
|
53 | if rets: | |
53 |
print |
|
54 | print('Carriage returns (\\r) found in:') | |
54 |
for f in rets: print |
|
55 | for f in rets: print(f, end=' ') |
General Comments 0
You need to be logged in to leave comments.
Login now