@@ -1,217 +1,223 @@
 """The official API for working with notebooks in the current format version."""
 
 from __future__ import print_function
 
 from xml.etree import ElementTree as ET
 import re
 
 from IPython.utils.py3compat import unicode_type
 
 from IPython.nbformat.v3 import (
     NotebookNode,
     new_code_cell, new_text_cell, new_notebook, new_output, new_worksheet,
     parse_filename, new_metadata, new_author, new_heading_cell, nbformat,
     nbformat_minor, nbformat_schema, to_notebook_json
 )
 from IPython.nbformat import v3 as _v_latest
 
 from .reader import reads as reader_reads
 from .reader import versions
 from .convert import convert
 from .validator import validate
 
 from IPython.utils.log import get_logger
 
+__all__ = ['NotebookNode', 'new_code_cell', 'new_text_cell', 'new_notebook',
+           'new_output', 'new_worksheet', 'parse_filename', 'new_metadata', 'new_author',
+           'new_heading_cell', 'nbformat', 'nbformat_minor', 'nbformat_schema',
+           'to_notebook_json', 'convert', 'validate', 'NBFormatError', 'parse_py',
+           'reads_json', 'writes_json', 'reads_py', 'writes_py', 'reads', 'writes', 'read',
+           'write']
 
 current_nbformat = nbformat
 current_nbformat_minor = nbformat_minor
 current_nbformat_module = _v_latest.__name__
 
 
 def docstring_nbformat_mod(func):
     """Decorator for docstrings referring to classes/functions accessed through
     nbformat.current.
 
     Put {nbformat_mod} in the docstring in place of 'IPython.nbformat.v3'.
     """
     func.__doc__ = func.__doc__.format(nbformat_mod=current_nbformat_module)
     return func
 
 
 class NBFormatError(ValueError):
     pass
 
 
 def parse_py(s, **kwargs):
     """Parse a string into a (nbformat, string) tuple."""
     nbf = current_nbformat
     nbm = current_nbformat_minor
 
     pattern = r'# <nbformat>(?P<nbformat>\d+[\.\d+]*)</nbformat>'
     m = re.search(pattern,s)
     if m is not None:
         digits = m.group('nbformat').split('.')
         nbf = int(digits[0])
         if len(digits) > 1:
             nbm = int(digits[1])
 
     return nbf, nbm, s
 
 
 def reads_json(nbjson, **kwargs):
     """Read a JSON notebook from a string and return the NotebookNode
     object. Report if any JSON format errors are detected.
 
     """
     nb = reader_reads(nbjson, **kwargs)
     nb_current = convert(nb, current_nbformat)
     errors = validate(nb_current)
     if errors:
         get_logger().error(
             "Notebook JSON is invalid (%d errors detected during read)",
             len(errors))
     return nb_current
 
 
 def writes_json(nb, **kwargs):
     """Take a NotebookNode object and write out a JSON string. Report if
     any JSON format errors are detected.
 
     """
     errors = validate(nb)
     if errors:
         get_logger().error(
             "Notebook JSON is invalid (%d errors detected during write)",
             len(errors))
     nbjson = versions[current_nbformat].writes_json(nb, **kwargs)
     return nbjson
 
 
 def reads_py(s, **kwargs):
     """Read a .py notebook from a string and return the NotebookNode object."""
     nbf, nbm, s = parse_py(s, **kwargs)
     if nbf in (2, 3):
         nb = versions[nbf].to_notebook_py(s, **kwargs)
     else:
         raise NBFormatError('Unsupported PY nbformat version: %i' % nbf)
     return nb
 
 
 def writes_py(nb, **kwargs):
     # nbformat 3 is the latest format that supports py
     return versions[3].writes_py(nb, **kwargs)
 
 
 # High level API
 
 
 def reads(s, format, **kwargs):
     """Read a notebook from a string and return the NotebookNode object.
 
     This function properly handles notebooks of any version. The notebook
     returned will always be in the current version's format.
 
     Parameters
     ----------
     s : unicode
         The raw unicode string to read the notebook from.
     format : (u'json', u'ipynb', u'py')
         The format that the string is in.
 
     Returns
     -------
     nb : NotebookNode
         The notebook that was read.
     """
     format = unicode_type(format)
     if format == u'json' or format == u'ipynb':
         return reads_json(s, **kwargs)
     elif format == u'py':
         return reads_py(s, **kwargs)
     else:
         raise NBFormatError('Unsupported format: %s' % format)
 
 
 def writes(nb, format, **kwargs):
     """Write a notebook to a string in a given format in the current nbformat version.
 
     This function always writes the notebook in the current nbformat version.
 
     Parameters
     ----------
     nb : NotebookNode
         The notebook to write.
     format : (u'json', u'ipynb', u'py')
         The format to write the notebook in.
 
     Returns
     -------
     s : unicode
         The notebook string.
     """
     format = unicode_type(format)
     if format == u'json' or format == u'ipynb':
         return writes_json(nb, **kwargs)
     elif format == u'py':
         return writes_py(nb, **kwargs)
     else:
         raise NBFormatError('Unsupported format: %s' % format)
 
 
 def read(fp, format, **kwargs):
     """Read a notebook from a file and return the NotebookNode object.
 
     This function properly handles notebooks of any version. The notebook
     returned will always be in the current version's format.
 
     Parameters
     ----------
     fp : file
         Any file-like object with a read method.
     format : (u'json', u'ipynb', u'py')
         The format that the string is in.
 
     Returns
     -------
     nb : NotebookNode
         The notebook that was read.
     """
     return reads(fp.read(), format, **kwargs)
 
 
 def write(nb, fp, format, **kwargs):
     """Write a notebook to a file in a given format in the current nbformat version.
 
     This function always writes the notebook in the current nbformat version.
 
     Parameters
     ----------
     nb : NotebookNode
         The notebook to write.
     fp : file
         Any file-like object with a write method.
     format : (u'json', u'ipynb', u'py')
         The format to write the notebook in.
 
     Returns
     -------
     s : unicode
         The notebook string.
     """
     return fp.write(writes(nb, format, **kwargs))
 
 def _convert_to_metadata():
     """Convert to a notebook having notebook metadata."""
     import glob
     for fname in glob.glob('*.ipynb'):
         print('Converting file:',fname)
         with open(fname,'r') as f:
             nb = read(f,u'json')
         md = new_metadata()
         if u'name' in nb:
             md.name = nb.name
             del nb[u'name']
         nb.metadata = md
         with open(fname,'w') as f:
             write(nb, f, u'json')
 
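Not part of the diff itself: a minimal usage sketch of the high-level API that the new __all__ list in IPython.nbformat.current advertises. The notebook built here is illustrative only, assuming the package is importable; it exercises nothing beyond the names exported above.

    # Hedged example: round-trip a notebook through the public API.
    from IPython.nbformat.current import new_notebook, reads, writes

    nb = new_notebook()                 # empty NotebookNode in the current format
    s = writes(nb, u'json')             # serialise to a JSON string
    nb_roundtrip = reads(s, u'json')    # parse it back into a NotebookNode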
@@ -1,45 +1,58 @@
 #!/usr/bin/env python
 """Script to auto-generate our API docs.
 """
 # stdlib imports
 import os
 import sys
 
 # local imports
 sys.path.append(os.path.abspath('sphinxext'))
 from apigen import ApiDocWriter
 
 #*****************************************************************************
 if __name__ == '__main__':
     pjoin = os.path.join
     package = 'IPython'
     outdir = pjoin('source','api','generated')
     docwriter = ApiDocWriter(package,rst_extension='.rst')
     # You have to escape the . here because . is a special char for regexps.
     # You must do make clean if you change this!
     docwriter.package_skip_patterns += [r'\.external$',
                                         # Extensions are documented elsewhere.
                                         r'\.extensions',
                                         r'\.config\.profile',
+                                        # These should be accessed via nbformat.current
+                                        r'\.nbformat\.v\d+',
                                         ]
 
     # The inputhook* modules often cause problems on import, such as trying to
     # load incompatible Qt bindings. It's easiest to leave them all out. The
     # main API is in the inputhook module, which is documented.
     docwriter.module_skip_patterns += [ r'\.lib\.inputhook.+',
                                         r'\.ipdoctest',
                                         r'\.testing\.plugin',
                                         # This just prints a deprecation msg:
                                         r'\.frontend$',
                                         # We document this manually.
                                         r'\.utils\.py3compat',
+                                        # These are exposed by nbformat.current
+                                        r'\.nbformat\.convert',
+                                        r'\.nbformat\.validator',
                                         ]
+
+    # These modules import functions and classes from other places to expose
+    # them as part of the public API. They must have __all__ defined. The
+    # non-API modules they import from should be excluded by the skip patterns
+    # above.
+    docwriter.names_from__all__.update({
+        'IPython.nbformat.current',
+    })
 
     # Now, generate the outputs
     docwriter.write_api_docs(outdir)
     # Write index with .txt extension - we can include it, but Sphinx won't try
     # to compile it
     docwriter.write_index(outdir, 'gen.txt',
                           relative_to = pjoin('source','api')
                           )
     print ('%d files written' % len(docwriter.written_modules))
@@ -1,422 +1,454 @@
 """Attempt to generate templates for module reference with Sphinx
 
 XXX - we exclude extension modules
 
 To include extension modules, first identify them as valid in the
 ``_uri2path`` method, then handle them in the ``_parse_module`` script.
 
 We get functions and classes by parsing the text of .py files.
 Alternatively we could import the modules for discovery, and we'd have
 to do that for extension modules. This would involve changing the
 ``_parse_module`` method to work via import and introspection, and
 might involve changing ``discover_modules`` (which determines which
 files are modules, and therefore which module URIs will be passed to
 ``_parse_module``).
 
 NOTE: this is a modified version of a script originally shipped with the
 PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed
 project."""
 
 from __future__ import print_function
 
 # Stdlib imports
 import ast
+import inspect
 import os
 import re
 
 class Obj(object):
     '''Namespace to hold arbitrary information.'''
     def __init__(self, **kwargs):
         for k, v in kwargs.items():
             setattr(self, k, v)
 
 class FuncClsScanner(ast.NodeVisitor):
     """Scan a module for top-level functions and classes.
 
     Skips objects with an @undoc decorator, or a name starting with '_'.
     """
     def __init__(self):
         ast.NodeVisitor.__init__(self)
         self.classes = []
         self.classes_seen = set()
         self.functions = []
 
     @staticmethod
     def has_undoc_decorator(node):
         return any(isinstance(d, ast.Name) and d.id == 'undoc' \
                    for d in node.decorator_list)
 
     def visit_If(self, node):
         if isinstance(node.test, ast.Compare) \
                 and isinstance(node.test.left, ast.Name) \
                 and node.test.left.id == '__name__':
             return  # Ignore classes defined in "if __name__ == '__main__':"
 
         self.generic_visit(node)
 
     def visit_FunctionDef(self, node):
         if not (node.name.startswith('_') or self.has_undoc_decorator(node)) \
                 and node.name not in self.functions:
             self.functions.append(node.name)
 
     def visit_ClassDef(self, node):
         if not (node.name.startswith('_') or self.has_undoc_decorator(node)) \
                 and node.name not in self.classes_seen:
             cls = Obj(name=node.name)
             cls.has_init = any(isinstance(n, ast.FunctionDef) and \
                                n.name=='__init__' for n in node.body)
             self.classes.append(cls)
             self.classes_seen.add(node.name)
 
     def scan(self, mod):
         self.visit(mod)
         return self.functions, self.classes
 
 # Functions and classes
 class ApiDocWriter(object):
     ''' Class for automatic detection and parsing of API docs
     to Sphinx-parsable reST format'''
 
     # only separating first two levels
     rst_section_levels = ['*', '=', '-', '~', '^']
 
     def __init__(self,
                  package_name,
                  rst_extension='.rst',
                  package_skip_patterns=None,
                  module_skip_patterns=None,
+                 names_from__all__=None,
                  ):
         ''' Initialize package for parsing
 
         Parameters
         ----------
         package_name : string
             Name of the top-level package. *package_name* must be the
             name of an importable package
         rst_extension : string, optional
             Extension for reST files, default '.rst'
         package_skip_patterns : None or sequence of {strings, regexps}
             Sequence of strings giving URIs of packages to be excluded
             Operates on the package path, starting at (including) the
             first dot in the package path, after *package_name* - so,
             if *package_name* is ``sphinx``, then ``sphinx.util`` will
             result in ``.util`` being passed for earching by these
             regexps. If is None, gives default. Default is:
             ['\.tests$']
         module_skip_patterns : None or sequence
             Sequence of strings giving URIs of modules to be excluded
             Operates on the module name including preceding URI path,
             back to the first dot after *package_name*. For example
             ``sphinx.util.console`` results in the string to search of
             ``.util.console``
             If is None, gives default. Default is:
             ['\.setup$', '\._']
+        names_from__all__ : set, optional
+            Modules listed in here will be scanned by doing ``from mod import *``,
+            rather than finding function and class definitions by scanning the
+            AST. This is intended for API modules which expose things defined in
+            other files. Modules listed here must define ``__all__`` to avoid
+            exposing everything they import.
         '''
         if package_skip_patterns is None:
             package_skip_patterns = ['\\.tests$']
         if module_skip_patterns is None:
             module_skip_patterns = ['\\.setup$', '\\._']
         self.package_name = package_name
         self.rst_extension = rst_extension
         self.package_skip_patterns = package_skip_patterns
         self.module_skip_patterns = module_skip_patterns
+        self.names_from__all__ = names_from__all__ or set()
 
     def get_package_name(self):
         return self._package_name
 
     def set_package_name(self, package_name):
         ''' Set package_name
 
         >>> docwriter = ApiDocWriter('sphinx')
         >>> import sphinx
         >>> docwriter.root_path == sphinx.__path__[0]
         True
         >>> docwriter.package_name = 'docutils'
         >>> import docutils
         >>> docwriter.root_path == docutils.__path__[0]
         True
         '''
         # It's also possible to imagine caching the module parsing here
         self._package_name = package_name
         self.root_module = __import__(package_name)
         self.root_path = self.root_module.__path__[0]
         self.written_modules = None
 
     package_name = property(get_package_name, set_package_name, None,
                             'get/set package_name')
 
     def _uri2path(self, uri):
         ''' Convert uri to absolute filepath
 
         Parameters
         ----------
         uri : string
             URI of python module to return path for
 
         Returns
         -------
         path : None or string
             Returns None if there is no valid path for this URI
             Otherwise returns absolute file system path for URI
 
         Examples
         --------
         >>> docwriter = ApiDocWriter('sphinx')
         >>> import sphinx
         >>> modpath = sphinx.__path__[0]
         >>> res = docwriter._uri2path('sphinx.builder')
         >>> res == os.path.join(modpath, 'builder.py')
         True
         >>> res = docwriter._uri2path('sphinx')
         >>> res == os.path.join(modpath, '__init__.py')
         True
         >>> docwriter._uri2path('sphinx.does_not_exist')
 
         '''
         if uri == self.package_name:
             return os.path.join(self.root_path, '__init__.py')
         path = uri.replace('.', os.path.sep)
         path = path.replace(self.package_name + os.path.sep, '')
         path = os.path.join(self.root_path, path)
         # XXX maybe check for extensions as well?
         if os.path.exists(path + '.py'): # file
             path += '.py'
         elif os.path.exists(os.path.join(path, '__init__.py')):
             path = os.path.join(path, '__init__.py')
         else:
             return None
         return path
 
     def _path2uri(self, dirpath):
         ''' Convert directory path to uri '''
         relpath = dirpath.replace(self.root_path, self.package_name)
         if relpath.startswith(os.path.sep):
             relpath = relpath[1:]
         return relpath.replace(os.path.sep, '.')
 
     def _parse_module(self, uri):
         ''' Parse module defined in *uri* '''
         filename = self._uri2path(uri)
         if filename is None:
             # nothing that we could handle here.
             return ([],[])
         with open(filename, 'rb') as f:
             mod = ast.parse(f.read())
         return FuncClsScanner().scan(mod)
 
+    def _import_funcs_classes(self, uri):
+        """Import * from uri, and separate out functions and classes."""
+        ns = {}
+        exec('from %s import *' % uri, ns)
+        funcs, classes = [], []
+        for name, obj in ns.items():
+            if inspect.isclass(obj):
+                cls = Obj(name=name, has_init='__init__' in obj.__dict__)
+                classes.append(cls)
+            elif inspect.isfunction(obj):
+                funcs.append(name)
+
+        return sorted(funcs), sorted(classes, key=lambda x: x.name)
+
+    def find_funcs_classes(self, uri):
+        """Find the functions and classes defined in the module ``uri``"""
+        if uri in self.names_from__all__:
+            # For API modules which expose things defined elsewhere, import them
+            return self._import_funcs_classes(uri)
+        else:
+            # For other modules, scan their AST to see what they define
+            return self._parse_module(uri)
+
     def generate_api_doc(self, uri):
         '''Make autodoc documentation template string for a module
 
         Parameters
         ----------
         uri : string
             python location of module - e.g 'sphinx.builder'
 
         Returns
         -------
         S : string
             Contents of API doc
         '''
         # get the names of all classes and functions
-        functions, classes = self._parse_module(uri)
+        functions, classes = self.find_funcs_classes(uri)
         if not len(functions) and not len(classes):
             #print ('WARNING: Empty -', uri)  # dbg
             return ''
 
         # Make a shorter version of the uri that omits the package name for
         # titles
         uri_short = re.sub(r'^%s\.' % self.package_name,'',uri)
 
         ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'
 
         # Set the chapter title to read 'Module:' for all modules except for the
         # main packages
         if '.' in uri:
             chap_title = 'Module: :mod:`' + uri_short + '`'
         else:
             chap_title = ':mod:`' + uri_short + '`'
         ad += chap_title + '\n' + self.rst_section_levels[1] * len(chap_title)
 
         ad += '\n.. automodule:: ' + uri + '\n'
         ad += '\n.. currentmodule:: ' + uri + '\n'
 
         if classes:
             subhead = str(len(classes)) + (' Classes' if len(classes) > 1 else ' Class')
             ad += '\n'+ subhead + '\n' + \
                   self.rst_section_levels[2] * len(subhead) + '\n'
 
         for c in classes:
             ad += '\n.. autoclass:: ' + c.name + '\n'
             # must NOT exclude from index to keep cross-refs working
             ad += '  :members:\n' \
                   '  :show-inheritance:\n'
             if c.has_init:
                 ad += '\n  .. automethod:: __init__\n'
 
         if functions:
             subhead = str(len(functions)) + (' Functions' if len(functions) > 1 else ' Function')
             ad += '\n'+ subhead + '\n' + \
                   self.rst_section_levels[2] * len(subhead) + '\n'
         for f in functions:
             # must NOT exclude from index to keep cross-refs working
             ad += '\n.. autofunction:: ' + uri + '.' + f + '\n\n'
         return ad
 
     def _survives_exclude(self, matchstr, match_type):
         ''' Returns True if *matchstr* does not match patterns
 
         ``self.package_name`` removed from front of string if present
 
         Examples
         --------
         >>> dw = ApiDocWriter('sphinx')
         >>> dw._survives_exclude('sphinx.okpkg', 'package')
         True
         >>> dw.package_skip_patterns.append('^\\.badpkg$')
         >>> dw._survives_exclude('sphinx.badpkg', 'package')
         False
         >>> dw._survives_exclude('sphinx.badpkg', 'module')
         True
         >>> dw._survives_exclude('sphinx.badmod', 'module')
         True
         >>> dw.module_skip_patterns.append('^\\.badmod$')
         >>> dw._survives_exclude('sphinx.badmod', 'module')
         False
         '''
         if match_type == 'module':
             patterns = self.module_skip_patterns
         elif match_type == 'package':
             patterns = self.package_skip_patterns
         else:
             raise ValueError('Cannot interpret match type "%s"'
                              % match_type)
         # Match to URI without package name
         L = len(self.package_name)
         if matchstr[:L] == self.package_name:
             matchstr = matchstr[L:]
         for pat in patterns:
             try:
                 pat.search
             except AttributeError:
                 pat = re.compile(pat)
             if pat.search(matchstr):
                 return False
         return True
 
     def discover_modules(self):
         ''' Return module sequence discovered from ``self.package_name``
 
 
         Parameters
         ----------
         None
 
         Returns
         -------
         mods : sequence
             Sequence of module names within ``self.package_name``
 
         Examples
         --------
         >>> dw = ApiDocWriter('sphinx')
         >>> mods = dw.discover_modules()
         >>> 'sphinx.util' in mods
         True
         >>> dw.package_skip_patterns.append('\.util$')
         >>> 'sphinx.util' in dw.discover_modules()
         False
         >>>
         '''
         modules = [self.package_name]
         # raw directory parsing
         for dirpath, dirnames, filenames in os.walk(self.root_path):
             # Check directory names for packages
             root_uri = self._path2uri(os.path.join(self.root_path,
                                                    dirpath))
             for dirname in dirnames[:]: # copy list - we modify inplace
                 package_uri = '.'.join((root_uri, dirname))
                 if (self._uri2path(package_uri) and
                     self._survives_exclude(package_uri, 'package')):
                     modules.append(package_uri)
                 else:
                     dirnames.remove(dirname)
             # Check filenames for modules
             for filename in filenames:
                 module_name = filename[:-3]
                 module_uri = '.'.join((root_uri, module_name))
                 if (self._uri2path(module_uri) and
                     self._survives_exclude(module_uri, 'module')):
                     modules.append(module_uri)
         return sorted(modules)
 
     def write_modules_api(self, modules,outdir):
         # write the list
         written_modules = []
         for m in modules:
             api_str = self.generate_api_doc(m)
             if not api_str:
                 continue
             # write out to file
             outfile = os.path.join(outdir,
                                    m + self.rst_extension)
             fileobj = open(outfile, 'wt')
             fileobj.write(api_str)
             fileobj.close()
             written_modules.append(m)
         self.written_modules = written_modules
 
     def write_api_docs(self, outdir):
         """Generate API reST files.
 
         Parameters
         ----------
         outdir : string
             Directory name in which to store files
             We create automatic filenames for each module
 
         Returns
         -------
         None
 
         Notes
         -----
         Sets self.written_modules to list of written modules
         """
         if not os.path.exists(outdir):
             os.mkdir(outdir)
         # compose list of modules
         modules = self.discover_modules()
         self.write_modules_api(modules,outdir)
 
     def write_index(self, outdir, path='gen.rst', relative_to=None):
         """Make a reST API index file from written files
 
         Parameters
         ----------
         outdir : string
             Directory to which to write generated index file
         path : string
             Filename to write index to
         relative_to : string
             path to which written filenames are relative. This
             component of the written file path will be removed from
             outdir, in the generated index. Default is None, meaning,
             leave path as it is.
         """
         if self.written_modules is None:
             raise ValueError('No modules written')
         # Get full filename path
         path = os.path.join(outdir, path)
         # Path written into index is relative to rootpath
         if relative_to is not None:
             relpath = outdir.replace(relative_to + os.path.sep, '')
         else:
             relpath = outdir
         idx = open(path,'wt')
         w = idx.write
         w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
         w('.. autosummary::\n'
           '   :toctree: %s\n\n' % relpath)
         for mod in self.written_modules:
             w('   %s\n' % mod)
         idx.close()
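Not part of the change itself: a hedged sketch of how the new names_from__all__ hook is exercised, mirroring the autogen_api.py configuration above. The module registered and the printed summary are illustrative only.

    # Illustrative driver: modules added to names_from__all__ are introspected
    # via `from <module> import *` instead of AST scanning.
    from apigen import ApiDocWriter

    docwriter = ApiDocWriter('IPython', rst_extension='.rst')
    docwriter.names_from__all__.update({'IPython.nbformat.current'})
    functions, classes = docwriter.find_funcs_classes('IPython.nbformat.current')
    print('%d functions, %d classes' % (len(functions), len(classes)))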