@@ -1,177 +1,177 @@
"""Module containing single call export functions."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from functools import wraps

from IPython.nbformat import NotebookNode
from IPython.utils.decorators import undoc
from IPython.utils.py3compat import string_types

from .exporter import Exporter
from .templateexporter import TemplateExporter
from .html import HTMLExporter
from .slides import SlidesExporter
from .latex import LatexExporter
from .pdf import PDFExporter
from .markdown import MarkdownExporter
from .python import PythonExporter
from .rst import RSTExporter
from .notebook import NotebookExporter
from .script import ScriptExporter

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

@undoc
def DocDecorator(f):

    #Set docstring of function
    f.__doc__ = f.__doc__ + """
    nb : :class:`~IPython.nbformat.NotebookNode`
        The notebook to export.
    config : config (optional, keyword arg)
        User configuration instance.
    resources : dict (optional, keyword arg)
        Resources used in the conversion process.

    Returns
    -------
    tuple- output, resources, exporter_instance
    output : str
        Jinja 2 output.  This is the resulting converted notebook.
    resources : dictionary
        Dictionary of resources used prior to and during the conversion
        process.
    exporter_instance : Exporter
        Instance of the Exporter class used to export the document.  Useful
        to caller because it provides a 'file_extension' property which
        specifies what extension the output should be saved as.

    Notes
    -----
    WARNING: API WILL CHANGE IN FUTURE RELEASES OF NBCONVERT
    """

    @wraps(f)
    def decorator(*args, **kwargs):
        return f(*args, **kwargs)

    return decorator


#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------

__all__ = [
    'export',
    'export_html',
    'export_custom',
    'export_slides',
    'export_latex',
    'export_pdf',
    'export_markdown',
    'export_python',
    'export_script',
    'export_rst',
    'export_by_name',
    'get_export_names',
    'ExporterNameError'
]


class ExporterNameError(NameError):
    pass

@DocDecorator
def export(exporter, nb, **kw):
    """
    Export a notebook object using specific exporter class.

    Parameters
    ----------
    exporter : class:`~jupyter_nbconvert.exporters.exporter.Exporter` class or instance
        Class type or instance of the exporter that should be used.  If the
        method initializes it's own instance of the class, it is ASSUMED that
        the class type provided exposes a constructor (``__init__``) with the same
        signature as the base Exporter class.
    """

    #Check arguments
    if exporter is None:
        raise TypeError("Exporter is None")
    elif not isinstance(exporter, Exporter) and not issubclass(exporter, Exporter):
        raise TypeError("exporter does not inherit from Exporter (base)")
    if nb is None:
        raise TypeError("nb is None")

    #Create the exporter
    resources = kw.pop('resources', None)
    if isinstance(exporter, Exporter):
        exporter_instance = exporter
    else:
        exporter_instance = exporter(**kw)

    #Try to convert the notebook using the appropriate conversion function.
    if isinstance(nb, NotebookNode):
        output, resources = exporter_instance.from_notebook_node(nb, resources)
    elif isinstance(nb, string_types):
        output, resources = exporter_instance.from_filename(nb, resources)
    else:
        output, resources = exporter_instance.from_file(nb, resources)
    return output, resources

exporter_map = dict(
    custom=TemplateExporter,
    html=HTMLExporter,
    slides=SlidesExporter,
    latex=LatexExporter,
    pdf=PDFExporter,
    markdown=MarkdownExporter,
    python=PythonExporter,
    rst=RSTExporter,
    notebook=NotebookExporter,
    script=ScriptExporter,
)

def _make_exporter(name, E):
    """make an export_foo function from a short key and Exporter class E"""
    def _export(nb, **kw):
        return export(E, nb, **kw)
    _export.__doc__ = """Export a notebook object to {0} format""".format(name)
    return _export

g = globals()

for name, E in exporter_map.items():
    g['export_%s' % name] = DocDecorator(_make_exporter(name, E))

@DocDecorator
def export_by_name(format_name, nb, **kw):
    """
    Export a notebook object to a template type by its name.  Reflection
    (Inspect) is used to find the template's corresponding explicit export
    method defined in this module.  That method is then called directly.

    Parameters
    ----------
    format_name : str
        Name of the template style to export to.
    """

    function_name = "export_" + format_name.lower()

    if function_name in globals():
        return globals()[function_name](nb, **kw)
    else:
        raise ExporterNameError("template for `%s` not found" % function_name)


def get_export_names():
    """Return a list of the currently supported export targets

    WARNING: API WILL CHANGE IN FUTURE RELEASES OF NBCONVERT"""
    return sorted(exporter_map.keys())

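For orientation, here is a rough usage sketch of the single-call API in the hunk above. It is illustrative only and not part of the diff; the package path jupyter_nbconvert.exporters and the file name notebook.ipynb are assumptions, and note that the function body returns the pair (output, resources) even though the shared docstring mentions a third element.

    # Hedged sketch: assuming the exporters package re-exports these names.
    from jupyter_nbconvert.exporters import (
        export, export_html, export_by_name, get_export_names, HTMLExporter)

    # export() accepts a NotebookNode, a filename string, or a file-like object.
    output, resources = export(HTMLExporter, 'notebook.ipynb')

    # The generated export_* helpers and the name-based lookup are shortcuts:
    output, resources = export_html('notebook.ipynb')
    output, resources = export_by_name('html', 'notebook.ipynb')

    print(get_export_names())              # sorted keys of exporter_map
    print(resources['output_extension'])   # e.g. '.html'
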
@@ -1,280 +1,280 @@
"""This module defines a base Exporter class. For Jinja template-based export,
see templateexporter.py.
"""


from __future__ import print_function, absolute_import

import io
import os
import copy
import collections
import datetime

from IPython.config.configurable import LoggingConfigurable
from IPython.config import Config
from IPython import nbformat
from IPython.utils.traitlets import MetaHasTraits, Unicode, List, TraitError
from IPython.utils.importstring import import_item
from IPython.utils import text, py3compat


class ResourcesDict(collections.defaultdict):
    def __missing__(self, key):
        return ''


class FilenameExtension(Unicode):
    """A trait for filename extensions."""

    default_value = u''
    info_text = 'a filename extension, beginning with a dot'

    def validate(self, obj, value):
        # cast to proper unicode
        value = super(FilenameExtension, self).validate(obj, value)

        # check that it starts with a dot
        if value and not value.startswith('.'):
            msg = "FileExtension trait '{}' does not begin with a dot: {!r}"
            raise TraitError(msg.format(self.name, value))

        return value


class Exporter(LoggingConfigurable):
    """
    Class containing methods that sequentially run a list of preprocessors on a
    NotebookNode object and then return the modified NotebookNode object and
    accompanying resources dict.
    """

    file_extension = FilenameExtension(
        '.txt', config=True,
        help="Extension of the file that should be written to disk"
    )

    # MIME type of the result file, for HTTP response headers.
    # This is *not* a traitlet, because we want to be able to access it from
    # the class, not just on instances.
    output_mimetype = ''

    #Configurability, allows the user to easily add filters and preprocessors.
    preprocessors = List(config=True,
        help="""List of preprocessors, by name or namespace, to enable.""")

    _preprocessors = List()

    default_preprocessors = List(['jupyter_nbconvert.preprocessors.coalesce_streams',
                                  'jupyter_nbconvert.preprocessors.SVG2PDFPreprocessor',
                                  'jupyter_nbconvert.preprocessors.ExtractOutputPreprocessor',
                                  'jupyter_nbconvert.preprocessors.CSSHTMLHeaderPreprocessor',
                                  'jupyter_nbconvert.preprocessors.RevealHelpPreprocessor',
                                  'jupyter_nbconvert.preprocessors.LatexPreprocessor',
                                  'jupyter_nbconvert.preprocessors.ClearOutputPreprocessor',
                                  'jupyter_nbconvert.preprocessors.ExecutePreprocessor',
                                  'jupyter_nbconvert.preprocessors.HighlightMagicsPreprocessor'],
        config=True,
        help="""List of preprocessors available by default, by name, namespace,
        instance, or type.""")


    def __init__(self, config=None, **kw):
        """
        Public constructor

        Parameters
        ----------
        config : config
            User configuration instance.
        """
        with_default_config = self.default_config
        if config:
            with_default_config.merge(config)

        super(Exporter, self).__init__(config=with_default_config, **kw)

        self._init_preprocessors()


    @property
    def default_config(self):
        return Config()

    def from_notebook_node(self, nb, resources=None, **kw):
        """
        Convert a notebook from a notebook node instance.

        Parameters
        ----------
        nb : :class:`~IPython.nbformat.NotebookNode`
            Notebook node (dict-like with attr-access)
        resources : dict
            Additional resources that can be accessed read/write by
            preprocessors and filters.
        **kw
            Ignored (?)
        """
        nb_copy = copy.deepcopy(nb)
        resources = self._init_resources(resources)

        if 'language' in nb['metadata']:
            resources['language'] = nb['metadata']['language'].lower()

        # Preprocess
        nb_copy, resources = self._preprocess(nb_copy, resources)

        return nb_copy, resources


    def from_filename(self, filename, resources=None, **kw):
        """
        Convert a notebook from a notebook file.

        Parameters
        ----------
        filename : str
            Full filename of the notebook file to open and convert.
        """

        # Pull the metadata from the filesystem.
        if resources is None:
            resources = ResourcesDict()
        if not 'metadata' in resources or resources['metadata'] == '':
            resources['metadata'] = ResourcesDict()
        path, basename = os.path.split(filename)
        notebook_name = basename[:basename.rfind('.')]
        resources['metadata']['name'] = notebook_name
        resources['metadata']['path'] = path

        modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
        resources['metadata']['modified_date'] = modified_date.strftime(text.date_format)

        with io.open(filename, encoding='utf-8') as f:
            return self.from_notebook_node(nbformat.read(f, as_version=4), resources=resources, **kw)


    def from_file(self, file_stream, resources=None, **kw):
        """
        Convert a notebook from a notebook file.

        Parameters
        ----------
        file_stream : file-like object
            Notebook file-like object to convert.
        """
        return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw)


    def register_preprocessor(self, preprocessor, enabled=False):
        """
        Register a preprocessor.
        Preprocessors are classes that act upon the notebook before it is
        passed into the Jinja templating engine.  preprocessors are also
        capable of passing additional information to the Jinja
        templating engine.

        Parameters
        ----------
        preprocessor : preprocessor
        """
        if preprocessor is None:
            raise TypeError('preprocessor')
        isclass = isinstance(preprocessor, type)
        constructed = not isclass

        # Handle preprocessor's registration based on it's type
        if constructed and isinstance(preprocessor, py3compat.string_types):
            # Preprocessor is a string, import the namespace and recursively call
            # this register_preprocessor method
            preprocessor_cls = import_item(preprocessor)
            return self.register_preprocessor(preprocessor_cls, enabled)

        if constructed and hasattr(preprocessor, '__call__'):
            # Preprocessor is a function, no need to construct it.
            # Register and return the preprocessor.
            if enabled:
                preprocessor.enabled = True
            self._preprocessors.append(preprocessor)
            return preprocessor

        elif isclass and isinstance(preprocessor, MetaHasTraits):
            # Preprocessor is configurable.  Make sure to pass in new default for
            # the enabled flag if one was specified.
            self.register_preprocessor(preprocessor(parent=self), enabled)

        elif isclass:
            # Preprocessor is not configurable, construct it
            self.register_preprocessor(preprocessor(), enabled)

        else:
            # Preprocessor is an instance of something without a __call__
            # attribute.
            raise TypeError('preprocessor')


    def _init_preprocessors(self):
        """
        Register all of the preprocessors needed for this exporter, disabled
        unless specified explicitly.
        """
        self._preprocessors = []

        # Load default preprocessors (not necessarly enabled by default).
        for preprocessor in self.default_preprocessors:
            self.register_preprocessor(preprocessor)

        # Load user-specified preprocessors.  Enable by default.
        for preprocessor in self.preprocessors:
            self.register_preprocessor(preprocessor, enabled=True)


    def _init_resources(self, resources):

        #Make sure the resources dict is of ResourcesDict type.
        if resources is None:
            resources = ResourcesDict()
        if not isinstance(resources, ResourcesDict):
            new_resources = ResourcesDict()
            new_resources.update(resources)
            resources = new_resources

        #Make sure the metadata extension exists in resources
        if 'metadata' in resources:
            if not isinstance(resources['metadata'], ResourcesDict):
                new_metadata = ResourcesDict()
                new_metadata.update(resources['metadata'])
                resources['metadata'] = new_metadata
        else:
            resources['metadata'] = ResourcesDict()
        if not resources['metadata']['name']:
            resources['metadata']['name'] = 'Notebook'

        #Set the output extension
        resources['output_extension'] = self.file_extension
        return resources


    def _preprocess(self, nb, resources):
        """
        Preprocess the notebook before passing it into the Jinja engine.
        To preprocess the notebook is to apply all of the

        Parameters
        ----------
        nb : notebook node
            notebook that is being exported.
        resources : a dict of additional resources that
            can be accessed read/write by preprocessors
        """

        # Do a copy.deepcopy first,
        # we are never safe enough with what the preprocessors could do.
        nbc = copy.deepcopy(nb)
        resc = copy.deepcopy(resources)

        #Run each preprocessor on the notebook.  Carry the output along
        #to each preprocessor
        for preprocessor in self._preprocessors:
            nbc, resc = preprocessor(nbc, resc)
        return nbc, resc

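As a rough sketch of how this base class is meant to be driven (not part of the diff; the preprocessor function and file name are invented for illustration, and the module path is an assumption):

    from jupyter_nbconvert.exporters.exporter import Exporter   # path assumed

    # A function-style preprocessor: any callable taking (nb, resources) and
    # returning the same pair can be registered.
    def strip_empty_cells(nb, resources):
        nb.cells = [cell for cell in nb.cells if cell.source.strip()]
        return nb, resources

    exporter = Exporter()
    exporter.register_preprocessor(strip_empty_cells, enabled=True)

    # from_filename() fills resources['metadata'] from the file system, runs the
    # registered preprocessors on a deep copy, and returns (notebook, resources).
    nb_out, resources = exporter.from_filename('example.ipynb')
    print(resources['metadata']['name'], resources['output_extension'])
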
@@ -1,57 +1,57 @@
"""HTML Exporter class"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import os

from jupyter_nbconvert.filters.highlight import Highlight2HTML
from IPython.config import Config

from .templateexporter import TemplateExporter

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

class HTMLExporter(TemplateExporter):
    """
    Exports a basic HTML document.  This exporter assists with the export of
    HTML.  Inherit from it if you are writing your own HTML template and need
    custom preprocessors/filters.  If you don't need custom preprocessors/
    filters, just change the 'template_file' config option.
    """

    def _file_extension_default(self):
        return '.html'

    def _default_template_path_default(self):
        return os.path.join("..", "templates", "html")

    def _template_file_default(self):
        return 'full'

    output_mimetype = 'text/html'

    @property
    def default_config(self):
        c = Config({
            'NbConvertBase': {
                'display_data_priority' : ['application/javascript', 'text/html', 'text/markdown', 'application/pdf', 'image/svg+xml', 'text/latex', 'image/png', 'image/jpeg', 'text/plain']
            },
            'CSSHTMLHeaderPreprocessor': {
                'enabled': True
            },
            'HighlightMagicsPreprocessor': {
                'enabled': True
            }
        })
        c.merge(super(HTMLExporter, self).default_config)
        return c

    def from_notebook_node(self, nb, resources=None, **kw):
        langinfo = nb.metadata.get('language_info', {})
        lexer = langinfo.get('pygments_lexer', langinfo.get('name', None))
        self.register_filter('highlight_code',
                             Highlight2HTML(pygments_lexer=lexer, parent=self))
        return super(HTMLExporter, self).from_notebook_node(nb, resources, **kw)

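Because the exporter merges its own default_config, most customization needs only configuration rather than a subclass. A hedged sketch (the module path, template name, and file name are assumptions):

    from IPython.config import Config
    from jupyter_nbconvert.exporters.html import HTMLExporter   # path assumed

    c = Config()
    c.HTMLExporter.template_file = 'basic'        # instead of the 'full' default
    c.CSSHTMLHeaderPreprocessor.enabled = False   # override an entry from default_config

    body, resources = HTMLExporter(config=c).from_filename('example.ipynb')
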
@@ -1,96 +1,96 @@
"""LaTeX Exporter class"""

#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Stdlib imports
import os

# IPython imports
from IPython.utils.traitlets import Unicode
from IPython.config import Config

from jupyter_nbconvert.filters.highlight import Highlight2Latex
from .templateexporter import TemplateExporter

#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------

class LatexExporter(TemplateExporter):
    """
    Exports to a Latex template.  Inherit from this class if your template is
    LaTeX based and you need custom tranformers/filters.  Inherit from it if
    you are writing your own HTML template and need custom tranformers/filters.
    If you don't need custom tranformers/filters, just change the
    'template_file' config option.  Place your template in the special "/latex"
    subfolder of the "../templates" folder.
    """

    def _file_extension_default(self):
        return '.tex'

    def _template_file_default(self):
        return 'article'

    #Latex constants
    def _default_template_path_default(self):
        return os.path.join("..", "templates", "latex")

    def _template_skeleton_path_default(self):
        return os.path.join("..", "templates", "latex", "skeleton")

    #Special Jinja2 syntax that will not conflict when exporting latex.
    jinja_comment_block_start = Unicode("((=", config=True)
    jinja_comment_block_end = Unicode("=))", config=True)
    jinja_variable_block_start = Unicode("(((", config=True)
    jinja_variable_block_end = Unicode(")))", config=True)
    jinja_logic_block_start = Unicode("((*", config=True)
    jinja_logic_block_end = Unicode("*))", config=True)

    #Extension that the template files use.
    template_extension = Unicode(".tplx", config=True)

    output_mimetype = 'text/latex'


    @property
    def default_config(self):
        c = Config({
            'NbConvertBase': {
                'display_data_priority' : ['text/latex', 'application/pdf', 'image/png', 'image/jpeg', 'image/svg+xml', 'text/plain']
            },
            'ExtractOutputPreprocessor': {
                'enabled': True
            },
            'SVG2PDFPreprocessor': {
                'enabled': True
            },
            'LatexPreprocessor': {
                'enabled': True
            },
            'SphinxPreprocessor': {
                'enabled': True
            },
            'HighlightMagicsPreprocessor': {
                'enabled': True
            }
        })
        c.merge(super(LatexExporter, self).default_config)
        return c

    def from_notebook_node(self, nb, resources=None, **kw):
        langinfo = nb.metadata.get('language_info', {})
        lexer = langinfo.get('pygments_lexer', langinfo.get('name', None))
        self.register_filter('highlight_code',
                             Highlight2Latex(pygments_lexer=lexer, parent=self))
        return super(LatexExporter, self).from_notebook_node(nb, resources, **kw)

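The overridden Jinja delimiters above exist because the default {{ }} and {% %} markers collide with braces in LaTeX source, so .tplx templates use (((var))) and ((* block *)) forms instead. A hedged usage sketch, with the module path and file name as assumptions:

    from jupyter_nbconvert.exporters.latex import LatexExporter   # path assumed

    exporter = LatexExporter()                    # renders the 'article' .tplx template
    tex, resources = exporter.from_filename('example.ipynb')
    print(resources['output_extension'])          # '.tex'
    # Images extracted by the enabled ExtractOutputPreprocessor are returned in
    # resources['outputs'] and still need to be written next to the .tex file.
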
@@ -1,147 +1,147 b'' | |||||
1 | """Export to PDF via latex""" |
|
1 | """Export to PDF via latex""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | import subprocess |
|
6 | import subprocess | |
7 | import os |
|
7 | import os | |
8 | import sys |
|
8 | import sys | |
9 |
|
9 | |||
10 | from IPython.utils.process import find_cmd |
|
10 | from IPython.utils.process import find_cmd | |
11 | from IPython.utils.traitlets import Integer, List, Bool, Instance |
|
11 | from IPython.utils.traitlets import Integer, List, Bool, Instance | |
12 | from IPython.utils.tempdir import TemporaryWorkingDirectory |
|
12 | from IPython.utils.tempdir import TemporaryWorkingDirectory | |
13 | from .latex import LatexExporter |
|
13 | from .latex import LatexExporter | |
14 |
|
14 | |||
15 |
|
15 | |||
16 | class PDFExporter(LatexExporter): |
|
16 | class PDFExporter(LatexExporter): | |
17 | """Writer designed to write to PDF files""" |
|
17 | """Writer designed to write to PDF files""" | |
18 |
|
18 | |||
19 | latex_count = Integer(3, config=True, |
|
19 | latex_count = Integer(3, config=True, | |
20 | help="How many times latex will be called." |
|
20 | help="How many times latex will be called." | |
21 | ) |
|
21 | ) | |
22 |
|
22 | |||
23 | latex_command = List([u"pdflatex", u"{filename}"], config=True, |
|
23 | latex_command = List([u"pdflatex", u"{filename}"], config=True, | |
24 | help="Shell command used to compile latex." |
|
24 | help="Shell command used to compile latex." | |
25 | ) |
|
25 | ) | |
26 |
|
26 | |||
27 | bib_command = List([u"bibtex", u"{filename}"], config=True, |
|
27 | bib_command = List([u"bibtex", u"{filename}"], config=True, | |
28 | help="Shell command used to run bibtex." |
|
28 | help="Shell command used to run bibtex." | |
29 | ) |
|
29 | ) | |
30 |
|
30 | |||
31 | verbose = Bool(False, config=True, |
|
31 | verbose = Bool(False, config=True, | |
32 | help="Whether to display the output of latex commands." |
|
32 | help="Whether to display the output of latex commands." | |
33 | ) |
|
33 | ) | |
34 |
|
34 | |||
35 | temp_file_exts = List(['.aux', '.bbl', '.blg', '.idx', '.log', '.out'], config=True, |
|
35 | temp_file_exts = List(['.aux', '.bbl', '.blg', '.idx', '.log', '.out'], config=True, | |
36 | help="File extensions of temp files to remove after running." |
|
36 | help="File extensions of temp files to remove after running." | |
37 | ) |
|
37 | ) | |
38 |
|
38 | |||
39 |
writer = Instance(" |
|
39 | writer = Instance("jupyter_nbconvert.writers.FilesWriter", args=()) | |
40 |
|
40 | |||
41 | def run_command(self, command_list, filename, count, log_function): |
|
41 | def run_command(self, command_list, filename, count, log_function): | |
42 | """Run command_list count times. |
|
42 | """Run command_list count times. | |
43 |
|
43 | |||
44 | Parameters |
|
44 | Parameters | |
45 | ---------- |
|
45 | ---------- | |
46 | command_list : list |
|
46 | command_list : list | |
47 | A list of args to provide to Popen. Each element of this |
|
47 | A list of args to provide to Popen. Each element of this | |
48 | list will be interpolated with the filename to convert. |
|
48 | list will be interpolated with the filename to convert. | |
49 | filename : unicode |
|
49 | filename : unicode | |
50 | The name of the file to convert. |
|
50 | The name of the file to convert. | |
51 | count : int |
|
51 | count : int | |
52 | How many times to run the command. |
|
52 | How many times to run the command. | |
53 |
|
53 | |||
54 | Returns |
|
54 | Returns | |
55 | ------- |
|
55 | ------- | |
56 | success : bool |
|
56 | success : bool | |
57 | A boolean indicating if the command was successful (True) |
|
57 | A boolean indicating if the command was successful (True) | |
58 | or failed (False). |
|
58 | or failed (False). | |
59 | """ |
|
59 | """ | |
60 | command = [c.format(filename=filename) for c in command_list] |
|
60 | command = [c.format(filename=filename) for c in command_list] | |
61 |
|
61 | |||
62 | # On windows with python 2.x there is a bug in subprocess.Popen and |
|
62 | # On windows with python 2.x there is a bug in subprocess.Popen and | |
63 | # unicode commands are not supported |
|
63 | # unicode commands are not supported | |
64 | if sys.platform == 'win32' and sys.version_info < (3,0): |
|
64 | if sys.platform == 'win32' and sys.version_info < (3,0): | |
65 | #We must use cp1252 encoding for calling subprocess.Popen |
|
65 | #We must use cp1252 encoding for calling subprocess.Popen | |
66 | #Note that sys.stdin.encoding and encoding.DEFAULT_ENCODING |
|
66 | #Note that sys.stdin.encoding and encoding.DEFAULT_ENCODING | |
67 | # could be different (cp437 in case of dos console) |
|
67 | # could be different (cp437 in case of dos console) | |
68 | command = [c.encode('cp1252') for c in command] |
|
68 | command = [c.encode('cp1252') for c in command] | |
69 |
|
69 | |||
70 | # This will throw a clearer error if the command is not found |
|
70 | # This will throw a clearer error if the command is not found | |
71 | find_cmd(command_list[0]) |
|
71 | find_cmd(command_list[0]) | |
72 |
|
72 | |||
73 | times = 'time' if count == 1 else 'times' |
|
73 | times = 'time' if count == 1 else 'times' | |
74 | self.log.info("Running %s %i %s: %s", command_list[0], count, times, command) |
|
74 | self.log.info("Running %s %i %s: %s", command_list[0], count, times, command) | |
75 | with open(os.devnull, 'rb') as null: |
|
75 | with open(os.devnull, 'rb') as null: | |
76 | stdout = subprocess.PIPE if not self.verbose else None |
|
76 | stdout = subprocess.PIPE if not self.verbose else None | |
77 | for index in range(count): |
|
77 | for index in range(count): | |
78 | p = subprocess.Popen(command, stdout=stdout, stdin=null) |
|
78 | p = subprocess.Popen(command, stdout=stdout, stdin=null) | |
79 | out, err = p.communicate() |
|
79 | out, err = p.communicate() | |
80 | if p.returncode: |
|
80 | if p.returncode: | |
81 | if self.verbose: |
|
81 | if self.verbose: | |
82 | # verbose means I didn't capture stdout with PIPE, |
|
82 | # verbose means I didn't capture stdout with PIPE, | |
83 | # so it's already been displayed and `out` is None. |
|
83 | # so it's already been displayed and `out` is None. | |
84 | out = u'' |
|
84 | out = u'' | |
85 | else: |
|
85 | else: | |
86 | out = out.decode('utf-8', 'replace') |
|
86 | out = out.decode('utf-8', 'replace') | |
87 | log_function(command, out) |
|
87 | log_function(command, out) | |
88 | return False # failure |
|
88 | return False # failure | |
89 | return True # success |
|
89 | return True # success | |
90 |
|
90 | |||
91 | def run_latex(self, filename): |
|
91 | def run_latex(self, filename): | |
92 | """Run pdflatex self.latex_count times.""" |
|
92 | """Run pdflatex self.latex_count times.""" | |
93 |
|
93 | |||
94 | def log_error(command, out): |
|
94 | def log_error(command, out): | |
95 | self.log.critical(u"%s failed: %s\n%s", command[0], command, out) |
|
95 | self.log.critical(u"%s failed: %s\n%s", command[0], command, out) | |
96 |
|
96 | |||
97 | return self.run_command(self.latex_command, filename, |
|
97 | return self.run_command(self.latex_command, filename, | |
98 | self.latex_count, log_error) |
|
98 | self.latex_count, log_error) | |
99 |
|
99 | |||
100 | def run_bib(self, filename): |
|
100 | def run_bib(self, filename): | |
101 | """Run bibtex self.latex_count times.""" |
|
101 | """Run bibtex self.latex_count times.""" | |
102 | filename = os.path.splitext(filename)[0] |
|
102 | filename = os.path.splitext(filename)[0] | |
103 |
|
103 | |||
104 | def log_error(command, out): |
|
104 | def log_error(command, out): | |
105 | self.log.warn('%s had problems, most likely because there were no citations', |
|
105 | self.log.warn('%s had problems, most likely because there were no citations', | |
106 | command[0]) |
|
106 | command[0]) | |
107 | self.log.debug(u"%s output: %s\n%s", command[0], command, out) |
|
107 | self.log.debug(u"%s output: %s\n%s", command[0], command, out) | |
108 |
|
108 | |||
109 | return self.run_command(self.bib_command, filename, 1, log_error) |
|
109 | return self.run_command(self.bib_command, filename, 1, log_error) | |
110 |
|
110 | |||
111 | def clean_temp_files(self, filename): |
|
111 | def clean_temp_files(self, filename): | |
112 | """Remove temporary files created by pdflatex/bibtex.""" |
|
112 | """Remove temporary files created by pdflatex/bibtex.""" | |
113 | self.log.info("Removing temporary LaTeX files") |
|
113 | self.log.info("Removing temporary LaTeX files") | |
114 | filename = os.path.splitext(filename)[0] |
|
114 | filename = os.path.splitext(filename)[0] | |
115 | for ext in self.temp_file_exts: |
|
115 | for ext in self.temp_file_exts: | |
116 | try: |
|
116 | try: | |
117 | os.remove(filename+ext) |
|
117 | os.remove(filename+ext) | |
118 | except OSError: |
|
118 | except OSError: | |
119 | pass |
|
119 | pass | |
120 |
|
120 | |||
121 | def from_notebook_node(self, nb, resources=None, **kw): |
|
121 | def from_notebook_node(self, nb, resources=None, **kw): | |
122 | latex, resources = super(PDFExporter, self).from_notebook_node( |
|
122 | latex, resources = super(PDFExporter, self).from_notebook_node( | |
123 | nb, resources=resources, **kw |
|
123 | nb, resources=resources, **kw | |
124 | ) |
|
124 | ) | |
125 | with TemporaryWorkingDirectory() as td: |
|
125 | with TemporaryWorkingDirectory() as td: | |
126 | notebook_name = "notebook" |
|
126 | notebook_name = "notebook" | |
127 | tex_file = self.writer.write(latex, resources, notebook_name=notebook_name) |
|
127 | tex_file = self.writer.write(latex, resources, notebook_name=notebook_name) | |
128 | self.log.info("Building PDF") |
|
128 | self.log.info("Building PDF") | |
129 | rc = self.run_latex(tex_file) |
|
129 | rc = self.run_latex(tex_file) | |
130 | if not rc: |
|
130 | if not rc: | |
131 | rc = self.run_bib(tex_file) |
|
131 | rc = self.run_bib(tex_file) | |
132 | if not rc: |
|
132 | if not rc: | |
133 | rc = self.run_latex(tex_file) |
|
133 | rc = self.run_latex(tex_file) | |
134 |
|
134 | |||
135 | pdf_file = notebook_name + '.pdf' |
|
135 | pdf_file = notebook_name + '.pdf' | |
136 | if not os.path.isfile(pdf_file): |
|
136 | if not os.path.isfile(pdf_file): | |
137 | raise RuntimeError("PDF creating failed") |
|
137 | raise RuntimeError("PDF creating failed") | |
138 | self.log.info('PDF successfully created') |
|
138 | self.log.info('PDF successfully created') | |
139 | with open(pdf_file, 'rb') as f: |
|
139 | with open(pdf_file, 'rb') as f: | |
140 | pdf_data = f.read() |
|
140 | pdf_data = f.read() | |
141 |
|
141 | |||
142 | # convert output extension to pdf |
|
142 | # convert output extension to pdf | |
143 | # the writer above required it to be tex |
|
143 | # the writer above required it to be tex | |
144 | resources['output_extension'] = '.pdf' |
|
144 | resources['output_extension'] = '.pdf' | |
145 |
|
145 | |||
146 | return pdf_data, resources |
|
146 | return pdf_data, resources | |
147 |
|
147 |
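A minimal sketch of driving the PDFExporter shown above from Python (assumed: the renamed package exposes it as jupyter_nbconvert.exporters.pdf.PDFExporter, a LaTeX toolchain is on PATH, and example.ipynb is a hypothetical input notebook):

    from IPython import nbformat
    from jupyter_nbconvert.exporters.pdf import PDFExporter

    nb = nbformat.read("example.ipynb", as_version=4)      # hypothetical input notebook
    exporter = PDFExporter()
    pdf_data, resources = exporter.from_notebook_node(nb)  # runs pdflatex + bibtex as above

    # from_notebook_node returns raw PDF bytes; write them out ourselves
    with open("example.pdf", "wb") as f:
        f.write(pdf_data)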
@@ -1,43 +1,43 b'' | |||||
1 | """HTML slide show Exporter class""" |
|
1 | """HTML slide show Exporter class""" | |
2 |
|
2 | |||
3 | #----------------------------------------------------------------------------- |
|
3 | #----------------------------------------------------------------------------- | |
4 | # Copyright (c) 2013, the IPython Development Team. |
|
4 | # Copyright (c) 2013, the IPython Development Team. | |
5 | # |
|
5 | # | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 | # |
|
7 | # | |
8 | # The full license is in the file COPYING.txt, distributed with this software. |
|
8 | # The full license is in the file COPYING.txt, distributed with this software. | |
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 |
|
10 | |||
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | # Imports |
|
12 | # Imports | |
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | from IPython.nbconvert import preprocessors |
|
15 | from jupyter_nbconvert import preprocessors | |
16 | from IPython.config import Config |
|
16 | from IPython.config import Config | |
17 |
|
17 | |||
18 | from .html import HTMLExporter |
|
18 | from .html import HTMLExporter | |
19 |
|
19 | |||
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 | # Classes |
|
21 | # Classes | |
22 | #----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
23 |
|
23 | |||
24 | class SlidesExporter(HTMLExporter): |
|
24 | class SlidesExporter(HTMLExporter): | |
25 | """Exports HTML slides with reveal.js""" |
|
25 | """Exports HTML slides with reveal.js""" | |
26 |
|
26 | |||
27 | def _file_extension_default(self): |
|
27 | def _file_extension_default(self): | |
28 | return '.slides.html' |
|
28 | return '.slides.html' | |
29 |
|
29 | |||
30 | def _template_file_default(self): |
|
30 | def _template_file_default(self): | |
31 | return 'slides_reveal' |
|
31 | return 'slides_reveal' | |
32 |
|
32 | |||
33 | output_mimetype = 'text/html' |
|
33 | output_mimetype = 'text/html' | |
34 |
|
34 | |||
35 | @property |
|
35 | @property | |
36 | def default_config(self): |
|
36 | def default_config(self): | |
37 | c = Config({ |
|
37 | c = Config({ | |
38 | 'RevealHelpPreprocessor': { |
|
38 | 'RevealHelpPreprocessor': { | |
39 | 'enabled': True, |
|
39 | 'enabled': True, | |
40 | }, |
|
40 | }, | |
41 | }) |
|
41 | }) | |
42 | c.merge(super(SlidesExporter,self).default_config) |
|
42 | c.merge(super(SlidesExporter,self).default_config) | |
43 | return c |
|
43 | return c |
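In the same hedged spirit, a sketch of producing reveal.js slides with the SlidesExporter defined above (talk.ipynb is hypothetical; the template name and file extension come from the defaults in this hunk):

    import io
    from IPython import nbformat
    from jupyter_nbconvert.exporters.slides import SlidesExporter

    nb = nbformat.read("talk.ipynb", as_version=4)       # hypothetical input notebook
    exporter = SlidesExporter()                           # default template: slides_reveal
    body, resources = exporter.from_notebook_node(nb)

    with io.open("talk" + exporter.file_extension, "w", encoding="utf-8") as f:
        f.write(body)                                     # writes talk.slides.html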
@@ -1,321 +1,321 b'' | |||||
1 | """This module defines TemplateExporter, a highly configurable converter |
|
1 | """This module defines TemplateExporter, a highly configurable converter | |
2 | that uses Jinja2 to export notebook files into different formats. |
|
2 | that uses Jinja2 to export notebook files into different formats. | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | #----------------------------------------------------------------------------- |
|
5 | #----------------------------------------------------------------------------- | |
6 | # Copyright (c) 2013, the IPython Development Team. |
|
6 | # Copyright (c) 2013, the IPython Development Team. | |
7 | # |
|
7 | # | |
8 | # Distributed under the terms of the Modified BSD License. |
|
8 | # Distributed under the terms of the Modified BSD License. | |
9 | # |
|
9 | # | |
10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
10 | # The full license is in the file COPYING.txt, distributed with this software. | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | from __future__ import print_function, absolute_import |
|
17 | from __future__ import print_function, absolute_import | |
18 |
|
18 | |||
19 | # Stdlib imports |
|
19 | # Stdlib imports | |
20 | import os |
|
20 | import os | |
21 |
|
21 | |||
22 | # other libs/dependencies are imported at runtime |
|
22 | # other libs/dependencies are imported at runtime | |
23 | # to move ImportErrors to runtime when the requirement is actually needed |
|
23 | # to move ImportErrors to runtime when the requirement is actually needed | |
24 |
|
24 | |||
25 | # IPython imports |
|
25 | # IPython imports | |
26 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List, Dict, Any |
|
26 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List, Dict, Any | |
27 | from IPython.utils.importstring import import_item |
|
27 | from IPython.utils.importstring import import_item | |
28 | from IPython.utils import py3compat, text |
|
28 | from IPython.utils import py3compat, text | |
29 |
|
29 | |||
30 | from IPython.nbconvert import filters |
|
30 | from jupyter_nbconvert import filters | |
31 | from .exporter import Exporter |
|
31 | from .exporter import Exporter | |
32 |
|
32 | |||
33 | #----------------------------------------------------------------------------- |
|
33 | #----------------------------------------------------------------------------- | |
34 | # Globals and constants |
|
34 | # Globals and constants | |
35 | #----------------------------------------------------------------------------- |
|
35 | #----------------------------------------------------------------------------- | |
36 |
|
36 | |||
37 | #Jinja2 extensions to load. |
|
37 | #Jinja2 extensions to load. | |
38 | JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols'] |
|
38 | JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols'] | |
39 |
|
39 | |||
40 | default_filters = { |
|
40 | default_filters = { | |
41 | 'indent': text.indent, |
|
41 | 'indent': text.indent, | |
42 | 'markdown2html': filters.markdown2html, |
|
42 | 'markdown2html': filters.markdown2html, | |
43 | 'ansi2html': filters.ansi2html, |
|
43 | 'ansi2html': filters.ansi2html, | |
44 | 'filter_data_type': filters.DataTypeFilter, |
|
44 | 'filter_data_type': filters.DataTypeFilter, | |
45 | 'get_lines': filters.get_lines, |
|
45 | 'get_lines': filters.get_lines, | |
46 | 'highlight2html': filters.Highlight2HTML, |
|
46 | 'highlight2html': filters.Highlight2HTML, | |
47 | 'highlight2latex': filters.Highlight2Latex, |
|
47 | 'highlight2latex': filters.Highlight2Latex, | |
48 | 'ipython2python': filters.ipython2python, |
|
48 | 'ipython2python': filters.ipython2python, | |
49 | 'posix_path': filters.posix_path, |
|
49 | 'posix_path': filters.posix_path, | |
50 | 'markdown2latex': filters.markdown2latex, |
|
50 | 'markdown2latex': filters.markdown2latex, | |
51 | 'markdown2rst': filters.markdown2rst, |
|
51 | 'markdown2rst': filters.markdown2rst, | |
52 | 'comment_lines': filters.comment_lines, |
|
52 | 'comment_lines': filters.comment_lines, | |
53 | 'strip_ansi': filters.strip_ansi, |
|
53 | 'strip_ansi': filters.strip_ansi, | |
54 | 'strip_dollars': filters.strip_dollars, |
|
54 | 'strip_dollars': filters.strip_dollars, | |
55 | 'strip_files_prefix': filters.strip_files_prefix, |
|
55 | 'strip_files_prefix': filters.strip_files_prefix, | |
56 | 'html2text' : filters.html2text, |
|
56 | 'html2text' : filters.html2text, | |
57 | 'add_anchor': filters.add_anchor, |
|
57 | 'add_anchor': filters.add_anchor, | |
58 | 'ansi2latex': filters.ansi2latex, |
|
58 | 'ansi2latex': filters.ansi2latex, | |
59 | 'wrap_text': filters.wrap_text, |
|
59 | 'wrap_text': filters.wrap_text, | |
60 | 'escape_latex': filters.escape_latex, |
|
60 | 'escape_latex': filters.escape_latex, | |
61 | 'citation2latex': filters.citation2latex, |
|
61 | 'citation2latex': filters.citation2latex, | |
62 | 'path2url': filters.path2url, |
|
62 | 'path2url': filters.path2url, | |
63 | 'add_prompts': filters.add_prompts, |
|
63 | 'add_prompts': filters.add_prompts, | |
64 | 'ascii_only': filters.ascii_only, |
|
64 | 'ascii_only': filters.ascii_only, | |
65 | 'prevent_list_blocks': filters.prevent_list_blocks, |
|
65 | 'prevent_list_blocks': filters.prevent_list_blocks, | |
66 | } |
|
66 | } | |
67 |
|
67 | |||
68 | #----------------------------------------------------------------------------- |
|
68 | #----------------------------------------------------------------------------- | |
69 | # Class |
|
69 | # Class | |
70 | #----------------------------------------------------------------------------- |
|
70 | #----------------------------------------------------------------------------- | |
71 |
|
71 | |||
72 | class TemplateExporter(Exporter): |
|
72 | class TemplateExporter(Exporter): | |
73 | """ |
|
73 | """ | |
74 | Exports notebooks into other file formats. Uses Jinja 2 templating engine |
|
74 | Exports notebooks into other file formats. Uses Jinja 2 templating engine | |
75 | to output new formats. Inherit from this class if you are creating a new |
|
75 | to output new formats. Inherit from this class if you are creating a new | |
76 | template type along with new filters/preprocessors. If the filters/ |
|
76 | template type along with new filters/preprocessors. If the filters/ | |
77 | preprocessors provided by default suffice, there is no need to inherit from |
|
77 | preprocessors provided by default suffice, there is no need to inherit from | |
78 | this class. Instead, override the template_file and file_extension |
|
78 | this class. Instead, override the template_file and file_extension | |
79 | traits via a config file. |
|
79 | traits via a config file. | |
80 |
|
80 | |||
81 | {filters} |
|
81 | {filters} | |
82 | """ |
|
82 | """ | |
83 |
|
83 | |||
84 | # finish the docstring |
|
84 | # finish the docstring | |
85 | __doc__ = __doc__.format(filters = '- '+'\n - '.join(default_filters.keys())) |
|
85 | __doc__ = __doc__.format(filters = '- '+'\n - '.join(default_filters.keys())) | |
86 |
|
86 | |||
87 |
|
87 | |||
88 | template_file = Unicode(u'default', |
|
88 | template_file = Unicode(u'default', | |
89 | config=True, |
|
89 | config=True, | |
90 | help="Name of the template file to use") |
|
90 | help="Name of the template file to use") | |
91 | def _template_file_changed(self, name, old, new): |
|
91 | def _template_file_changed(self, name, old, new): | |
92 | if new == 'default': |
|
92 | if new == 'default': | |
93 | self.template_file = self.default_template |
|
93 | self.template_file = self.default_template | |
94 | else: |
|
94 | else: | |
95 | self.template_file = new |
|
95 | self.template_file = new | |
96 | self.template = None |
|
96 | self.template = None | |
97 | self._load_template() |
|
97 | self._load_template() | |
98 |
|
98 | |||
99 | default_template = Unicode(u'') |
|
99 | default_template = Unicode(u'') | |
100 | template = Any() |
|
100 | template = Any() | |
101 | environment = Any() |
|
101 | environment = Any() | |
102 |
|
102 | |||
103 | template_path = List(['.'], config=True) |
|
103 | template_path = List(['.'], config=True) | |
104 | def _template_path_changed(self, name, old, new): |
|
104 | def _template_path_changed(self, name, old, new): | |
105 | self._load_template() |
|
105 | self._load_template() | |
106 |
|
106 | |||
107 | default_template_path = Unicode( |
|
107 | default_template_path = Unicode( | |
108 | os.path.join("..", "templates"), |
|
108 | os.path.join("..", "templates"), | |
109 | help="Path where the template files are located.") |
|
109 | help="Path where the template files are located.") | |
110 |
|
110 | |||
111 | template_skeleton_path = Unicode( |
|
111 | template_skeleton_path = Unicode( | |
112 | os.path.join("..", "templates", "skeleton"), |
|
112 | os.path.join("..", "templates", "skeleton"), | |
113 | help="Path where the template skeleton files are located.") |
|
113 | help="Path where the template skeleton files are located.") | |
114 |
|
114 | |||
115 | #Jinja block definitions |
|
115 | #Jinja block definitions | |
116 | jinja_comment_block_start = Unicode("", config=True) |
|
116 | jinja_comment_block_start = Unicode("", config=True) | |
117 | jinja_comment_block_end = Unicode("", config=True) |
|
117 | jinja_comment_block_end = Unicode("", config=True) | |
118 | jinja_variable_block_start = Unicode("", config=True) |
|
118 | jinja_variable_block_start = Unicode("", config=True) | |
119 | jinja_variable_block_end = Unicode("", config=True) |
|
119 | jinja_variable_block_end = Unicode("", config=True) | |
120 | jinja_logic_block_start = Unicode("", config=True) |
|
120 | jinja_logic_block_start = Unicode("", config=True) | |
121 | jinja_logic_block_end = Unicode("", config=True) |
|
121 | jinja_logic_block_end = Unicode("", config=True) | |
122 |
|
122 | |||
123 | #Extension that the template files use. |
|
123 | #Extension that the template files use. | |
124 | template_extension = Unicode(".tpl", config=True) |
|
124 | template_extension = Unicode(".tpl", config=True) | |
125 |
|
125 | |||
126 | filters = Dict(config=True, |
|
126 | filters = Dict(config=True, | |
127 | help="""Dictionary of filters, by name and namespace, to add to the Jinja |
|
127 | help="""Dictionary of filters, by name and namespace, to add to the Jinja | |
128 | environment.""") |
|
128 | environment.""") | |
129 |
|
129 | |||
130 | raw_mimetypes = List(config=True, |
|
130 | raw_mimetypes = List(config=True, | |
131 | help="""formats of raw cells to be included in this Exporter's output.""" |
|
131 | help="""formats of raw cells to be included in this Exporter's output.""" | |
132 | ) |
|
132 | ) | |
133 | def _raw_mimetypes_default(self): |
|
133 | def _raw_mimetypes_default(self): | |
134 | return [self.output_mimetype, ''] |
|
134 | return [self.output_mimetype, ''] | |
135 |
|
135 | |||
136 |
|
136 | |||
137 | def __init__(self, config=None, extra_loaders=None, **kw): |
|
137 | def __init__(self, config=None, extra_loaders=None, **kw): | |
138 | """ |
|
138 | """ | |
139 | Public constructor |
|
139 | Public constructor | |
140 |
|
140 | |||
141 | Parameters |
|
141 | Parameters | |
142 | ---------- |
|
142 | ---------- | |
143 | config : config |
|
143 | config : config | |
144 | User configuration instance. |
|
144 | User configuration instance. | |
145 | extra_loaders : list[of Jinja Loaders] |
|
145 | extra_loaders : list[of Jinja Loaders] | |
146 | ordered list of Jinja loader to find templates. Will be tried in order |
|
146 | ordered list of Jinja loader to find templates. Will be tried in order | |
147 | before the default FileSystem ones. |
|
147 | before the default FileSystem ones. | |
148 | template : str (optional, kw arg) |
|
148 | template : str (optional, kw arg) | |
149 | Template to use when exporting. |
|
149 | Template to use when exporting. | |
150 | """ |
|
150 | """ | |
151 | super(TemplateExporter, self).__init__(config=config, **kw) |
|
151 | super(TemplateExporter, self).__init__(config=config, **kw) | |
152 |
|
152 | |||
153 | #Init |
|
153 | #Init | |
154 | self._init_template() |
|
154 | self._init_template() | |
155 | self._init_environment(extra_loaders=extra_loaders) |
|
155 | self._init_environment(extra_loaders=extra_loaders) | |
156 | self._init_filters() |
|
156 | self._init_filters() | |
157 |
|
157 | |||
158 |
|
158 | |||
159 | def _load_template(self): |
|
159 | def _load_template(self): | |
160 | """Load the Jinja template object from the template file |
|
160 | """Load the Jinja template object from the template file | |
161 |
|
161 | |||
162 | This is a no-op if the template attribute is already defined, |
|
162 | This is a no-op if the template attribute is already defined, | |
163 | or the Jinja environment is not setup yet. |
|
163 | or the Jinja environment is not setup yet. | |
164 |
|
164 | |||
165 | This is triggered by various trait changes that would change the template. |
|
165 | This is triggered by various trait changes that would change the template. | |
166 | """ |
|
166 | """ | |
167 | from jinja2 import TemplateNotFound |
|
167 | from jinja2 import TemplateNotFound | |
168 |
|
168 | |||
169 | if self.template is not None: |
|
169 | if self.template is not None: | |
170 | return |
|
170 | return | |
171 | # called too early, do nothing |
|
171 | # called too early, do nothing | |
172 | if self.environment is None: |
|
172 | if self.environment is None: | |
173 | return |
|
173 | return | |
174 | # Try different template names during conversion. First try to load the |
|
174 | # Try different template names during conversion. First try to load the | |
175 | # template by name with extension added, then try loading the template |
|
175 | # template by name with extension added, then try loading the template | |
176 | # as if the name is explicitly specified, then try the name as a |
|
176 | # as if the name is explicitly specified, then try the name as a | |
177 | # 'flavor', and lastly just try to load the template by module name. |
|
177 | # 'flavor', and lastly just try to load the template by module name. | |
178 | try_names = [] |
|
178 | try_names = [] | |
179 | if self.template_file: |
|
179 | if self.template_file: | |
180 | try_names.extend([ |
|
180 | try_names.extend([ | |
181 | self.template_file + self.template_extension, |
|
181 | self.template_file + self.template_extension, | |
182 | self.template_file, |
|
182 | self.template_file, | |
183 | ]) |
|
183 | ]) | |
184 | for try_name in try_names: |
|
184 | for try_name in try_names: | |
185 | self.log.debug("Attempting to load template %s", try_name) |
|
185 | self.log.debug("Attempting to load template %s", try_name) | |
186 | try: |
|
186 | try: | |
187 | self.template = self.environment.get_template(try_name) |
|
187 | self.template = self.environment.get_template(try_name) | |
188 | except (TemplateNotFound, IOError): |
|
188 | except (TemplateNotFound, IOError): | |
189 | pass |
|
189 | pass | |
190 | except Exception as e: |
|
190 | except Exception as e: | |
191 | self.log.warn("Unexpected exception loading template: %s", try_name, exc_info=True) |
|
191 | self.log.warn("Unexpected exception loading template: %s", try_name, exc_info=True) | |
192 | else: |
|
192 | else: | |
193 | self.log.debug("Loaded template %s", try_name) |
|
193 | self.log.debug("Loaded template %s", try_name) | |
194 | break |
|
194 | break | |
195 |
|
195 | |||
196 | def from_notebook_node(self, nb, resources=None, **kw): |
|
196 | def from_notebook_node(self, nb, resources=None, **kw): | |
197 | """ |
|
197 | """ | |
198 | Convert a notebook from a notebook node instance. |
|
198 | Convert a notebook from a notebook node instance. | |
199 |
|
199 | |||
200 | Parameters |
|
200 | Parameters | |
201 | ---------- |
|
201 | ---------- | |
202 | nb : :class:`~IPython.nbformat.NotebookNode` |
|
202 | nb : :class:`~IPython.nbformat.NotebookNode` | |
203 | Notebook node |
|
203 | Notebook node | |
204 | resources : dict |
|
204 | resources : dict | |
205 | Additional resources that can be accessed read/write by |
|
205 | Additional resources that can be accessed read/write by | |
206 | preprocessors and filters. |
|
206 | preprocessors and filters. | |
207 | """ |
|
207 | """ | |
208 | nb_copy, resources = super(TemplateExporter, self).from_notebook_node(nb, resources, **kw) |
|
208 | nb_copy, resources = super(TemplateExporter, self).from_notebook_node(nb, resources, **kw) | |
209 | resources.setdefault('raw_mimetypes', self.raw_mimetypes) |
|
209 | resources.setdefault('raw_mimetypes', self.raw_mimetypes) | |
210 |
|
210 | |||
211 | self._load_template() |
|
211 | self._load_template() | |
212 |
|
212 | |||
213 | if self.template is not None: |
|
213 | if self.template is not None: | |
214 | output = self.template.render(nb=nb_copy, resources=resources) |
|
214 | output = self.template.render(nb=nb_copy, resources=resources) | |
215 | else: |
|
215 | else: | |
216 | raise IOError('template file "%s" could not be found' % self.template_file) |
|
216 | raise IOError('template file "%s" could not be found' % self.template_file) | |
217 | return output, resources |
|
217 | return output, resources | |
218 |
|
218 | |||
219 |
|
219 | |||
220 | def register_filter(self, name, jinja_filter): |
|
220 | def register_filter(self, name, jinja_filter): | |
221 | """ |
|
221 | """ | |
222 | Register a filter. |
|
222 | Register a filter. | |
223 | A filter is a function that accepts and acts on one string. |
|
223 | A filter is a function that accepts and acts on one string. | |
224 | The filters are accessible within the Jinja templating engine. |
|
224 | The filters are accessible within the Jinja templating engine. | |
225 |
|
225 | |||
226 | Parameters |
|
226 | Parameters | |
227 | ---------- |
|
227 | ---------- | |
228 | name : str |
|
228 | name : str | |
229 | name to give the filter in the Jinja engine |
|
229 | name to give the filter in the Jinja engine | |
230 | filter : filter |
|
230 | filter : filter | |
231 | """ |
|
231 | """ | |
232 | if jinja_filter is None: |
|
232 | if jinja_filter is None: | |
233 | raise TypeError('filter') |
|
233 | raise TypeError('filter') | |
234 | isclass = isinstance(jinja_filter, type) |
|
234 | isclass = isinstance(jinja_filter, type) | |
235 | constructed = not isclass |
|
235 | constructed = not isclass | |
236 |
|
236 | |||
237 | #Handle filter's registration based on its type |
|
237 | #Handle filter's registration based on its type | |
238 | if constructed and isinstance(jinja_filter, py3compat.string_types): |
|
238 | if constructed and isinstance(jinja_filter, py3compat.string_types): | |
239 | #filter is a string, import the namespace and recursively call |
|
239 | #filter is a string, import the namespace and recursively call | |
240 | #this register_filter method |
|
240 | #this register_filter method | |
241 | filter_cls = import_item(jinja_filter) |
|
241 | filter_cls = import_item(jinja_filter) | |
242 | return self.register_filter(name, filter_cls) |
|
242 | return self.register_filter(name, filter_cls) | |
243 |
|
243 | |||
244 | if constructed and hasattr(jinja_filter, '__call__'): |
|
244 | if constructed and hasattr(jinja_filter, '__call__'): | |
245 | #filter is a function, no need to construct it. |
|
245 | #filter is a function, no need to construct it. | |
246 | self.environment.filters[name] = jinja_filter |
|
246 | self.environment.filters[name] = jinja_filter | |
247 | return jinja_filter |
|
247 | return jinja_filter | |
248 |
|
248 | |||
249 | elif isclass and isinstance(jinja_filter, MetaHasTraits): |
|
249 | elif isclass and isinstance(jinja_filter, MetaHasTraits): | |
250 | #filter is configurable. Make sure to pass in new default for |
|
250 | #filter is configurable. Make sure to pass in new default for | |
251 | #the enabled flag if one was specified. |
|
251 | #the enabled flag if one was specified. | |
252 | filter_instance = jinja_filter(parent=self) |
|
252 | filter_instance = jinja_filter(parent=self) | |
253 | self.register_filter(name, filter_instance ) |
|
253 | self.register_filter(name, filter_instance ) | |
254 |
|
254 | |||
255 | elif isclass: |
|
255 | elif isclass: | |
256 | #filter is not configurable, construct it |
|
256 | #filter is not configurable, construct it | |
257 | filter_instance = jinja_filter() |
|
257 | filter_instance = jinja_filter() | |
258 | self.register_filter(name, filter_instance) |
|
258 | self.register_filter(name, filter_instance) | |
259 |
|
259 | |||
260 | else: |
|
260 | else: | |
261 | #filter is an instance of something without a __call__ |
|
261 | #filter is an instance of something without a __call__ | |
262 | #attribute. |
|
262 | #attribute. | |
263 | raise TypeError('filter') |
|
263 | raise TypeError('filter') | |
264 |
|
264 | |||
265 |
|
265 | |||
266 | def _init_template(self): |
|
266 | def _init_template(self): | |
267 | """ |
|
267 | """ | |
268 | Make sure a template name is specified. If one isn't specified, try to |
|
268 | Make sure a template name is specified. If one isn't specified, try to | |
269 | build one from the information we know. |
|
269 | build one from the information we know. | |
270 | """ |
|
270 | """ | |
271 | self._template_file_changed('template_file', self.template_file, self.template_file) |
|
271 | self._template_file_changed('template_file', self.template_file, self.template_file) | |
272 |
|
272 | |||
273 |
|
273 | |||
274 | def _init_environment(self, extra_loaders=None): |
|
274 | def _init_environment(self, extra_loaders=None): | |
275 | """ |
|
275 | """ | |
276 | Create the Jinja templating environment. |
|
276 | Create the Jinja templating environment. | |
277 | """ |
|
277 | """ | |
278 | from jinja2 import Environment, ChoiceLoader, FileSystemLoader |
|
278 | from jinja2 import Environment, ChoiceLoader, FileSystemLoader | |
279 | here = os.path.dirname(os.path.realpath(__file__)) |
|
279 | here = os.path.dirname(os.path.realpath(__file__)) | |
280 | loaders = [] |
|
280 | loaders = [] | |
281 | if extra_loaders: |
|
281 | if extra_loaders: | |
282 | loaders.extend(extra_loaders) |
|
282 | loaders.extend(extra_loaders) | |
283 |
|
283 | |||
284 | paths = self.template_path |
|
284 | paths = self.template_path | |
285 | paths.extend([os.path.join(here, self.default_template_path), |
|
285 | paths.extend([os.path.join(here, self.default_template_path), | |
286 | os.path.join(here, self.template_skeleton_path)]) |
|
286 | os.path.join(here, self.template_skeleton_path)]) | |
287 | loaders.append(FileSystemLoader(paths)) |
|
287 | loaders.append(FileSystemLoader(paths)) | |
288 |
|
288 | |||
289 | self.environment = Environment( |
|
289 | self.environment = Environment( | |
290 | loader= ChoiceLoader(loaders), |
|
290 | loader= ChoiceLoader(loaders), | |
291 | extensions=JINJA_EXTENSIONS |
|
291 | extensions=JINJA_EXTENSIONS | |
292 | ) |
|
292 | ) | |
293 |
|
293 | |||
294 | #Set special Jinja2 syntax that will not conflict with latex. |
|
294 | #Set special Jinja2 syntax that will not conflict with latex. | |
295 | if self.jinja_logic_block_start: |
|
295 | if self.jinja_logic_block_start: | |
296 | self.environment.block_start_string = self.jinja_logic_block_start |
|
296 | self.environment.block_start_string = self.jinja_logic_block_start | |
297 | if self.jinja_logic_block_end: |
|
297 | if self.jinja_logic_block_end: | |
298 | self.environment.block_end_string = self.jinja_logic_block_end |
|
298 | self.environment.block_end_string = self.jinja_logic_block_end | |
299 | if self.jinja_variable_block_start: |
|
299 | if self.jinja_variable_block_start: | |
300 | self.environment.variable_start_string = self.jinja_variable_block_start |
|
300 | self.environment.variable_start_string = self.jinja_variable_block_start | |
301 | if self.jinja_variable_block_end: |
|
301 | if self.jinja_variable_block_end: | |
302 | self.environment.variable_end_string = self.jinja_variable_block_end |
|
302 | self.environment.variable_end_string = self.jinja_variable_block_end | |
303 | if self.jinja_comment_block_start: |
|
303 | if self.jinja_comment_block_start: | |
304 | self.environment.comment_start_string = self.jinja_comment_block_start |
|
304 | self.environment.comment_start_string = self.jinja_comment_block_start | |
305 | if self.jinja_comment_block_end: |
|
305 | if self.jinja_comment_block_end: | |
306 | self.environment.comment_end_string = self.jinja_comment_block_end |
|
306 | self.environment.comment_end_string = self.jinja_comment_block_end | |
307 |
|
307 | |||
308 |
|
308 | |||
309 | def _init_filters(self): |
|
309 | def _init_filters(self): | |
310 | """ |
|
310 | """ | |
311 | Register all of the filters required for the exporter. |
|
311 | Register all of the filters required for the exporter. | |
312 | """ |
|
312 | """ | |
313 |
|
313 | |||
314 | #Add default filters to the Jinja2 environment |
|
314 | #Add default filters to the Jinja2 environment | |
315 | for key, value in default_filters.items(): |
|
315 | for key, value in default_filters.items(): | |
316 | self.register_filter(key, value) |
|
316 | self.register_filter(key, value) | |
317 |
|
317 | |||
318 | #Load user filters. Overwrite existing filters if need be. |
|
318 | #Load user filters. Overwrite existing filters if need be. | |
319 | if self.filters: |
|
319 | if self.filters: | |
320 | for key, user_filter in self.filters.items(): |
|
320 | for key, user_filter in self.filters.items(): | |
321 | self.register_filter(key, user_filter) |
|
321 | self.register_filter(key, user_filter) |
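A short sketch of the register_filter hook documented above; the shout filter, the python template, and example.ipynb are illustrative only:

    from jupyter_nbconvert.exporters.templateexporter import TemplateExporter

    def shout(text):
        # Illustrative custom filter: upper-case a string inside a template.
        return text.upper()

    exporter = TemplateExporter(template_file='python')    # same template the tests below use
    exporter.register_filter('shout', shout)               # usable as {{ some_text | shout }}
    output, resources = exporter.from_filename("example.ipynb")   # hypothetical notebook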
@@ -1,108 +1,108 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Module with tests for templateexporter.py |
|
2 | Module with tests for templateexporter.py | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | #----------------------------------------------------------------------------- |
|
5 | #----------------------------------------------------------------------------- | |
6 | # Copyright (c) 2013, the IPython Development Team. |
|
6 | # Copyright (c) 2013, the IPython Development Team. | |
7 | # |
|
7 | # | |
8 | # Distributed under the terms of the Modified BSD License. |
|
8 | # Distributed under the terms of the Modified BSD License. | |
9 | # |
|
9 | # | |
10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
10 | # The full license is in the file COPYING.txt, distributed with this software. | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | from IPython.config import Config |
|
17 | from IPython.config import Config | |
18 |
|
18 | |||
19 | from .base import ExportersTestsBase |
|
19 | from .base import ExportersTestsBase | |
20 | from .cheese import CheesePreprocessor |
|
20 | from .cheese import CheesePreprocessor | |
21 | from ..templateexporter import TemplateExporter |
|
21 | from ..templateexporter import TemplateExporter | |
22 |
|
22 | |||
23 |
|
23 | |||
24 | #----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
25 | # Class |
|
25 | # Class | |
26 | #----------------------------------------------------------------------------- |
|
26 | #----------------------------------------------------------------------------- | |
27 |
|
27 | |||
28 | class TestExporter(ExportersTestsBase): |
|
28 | class TestExporter(ExportersTestsBase): | |
29 | """Contains test functions for exporter.py""" |
|
29 | """Contains test functions for exporter.py""" | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def test_constructor(self): |
|
32 | def test_constructor(self): | |
33 | """ |
|
33 | """ | |
34 | Can a TemplateExporter be constructed? |
|
34 | Can a TemplateExporter be constructed? | |
35 | """ |
|
35 | """ | |
36 | TemplateExporter() |
|
36 | TemplateExporter() | |
37 |
|
37 | |||
38 |
|
38 | |||
39 | def test_export(self): |
|
39 | def test_export(self): | |
40 | """ |
|
40 | """ | |
41 | Can a TemplateExporter export something? |
|
41 | Can a TemplateExporter export something? | |
42 | """ |
|
42 | """ | |
43 | exporter = self._make_exporter() |
|
43 | exporter = self._make_exporter() | |
44 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
44 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
45 | assert len(output) > 0 |
|
45 | assert len(output) > 0 | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | def test_extract_outputs(self): |
|
48 | def test_extract_outputs(self): | |
49 | """ |
|
49 | """ | |
50 | If the ExtractOutputPreprocessor is enabled, are outputs extracted? |
|
50 | If the ExtractOutputPreprocessor is enabled, are outputs extracted? | |
51 | """ |
|
51 | """ | |
52 | config = Config({'ExtractOutputPreprocessor': {'enabled': True}}) |
|
52 | config = Config({'ExtractOutputPreprocessor': {'enabled': True}}) | |
53 | exporter = self._make_exporter(config=config) |
|
53 | exporter = self._make_exporter(config=config) | |
54 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
54 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
55 | assert resources is not None |
|
55 | assert resources is not None | |
56 | assert isinstance(resources['outputs'], dict) |
|
56 | assert isinstance(resources['outputs'], dict) | |
57 | assert len(resources['outputs']) > 0 |
|
57 | assert len(resources['outputs']) > 0 | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | def test_preprocessor_class(self): |
|
60 | def test_preprocessor_class(self): | |
61 | """ |
|
61 | """ | |
62 | Can a preprocessor be added to the preprocessors list by class type? |
|
62 | Can a preprocessor be added to the preprocessors list by class type? | |
63 | """ |
|
63 | """ | |
64 | config = Config({'Exporter': {'preprocessors': [CheesePreprocessor]}}) |
|
64 | config = Config({'Exporter': {'preprocessors': [CheesePreprocessor]}}) | |
65 | exporter = self._make_exporter(config=config) |
|
65 | exporter = self._make_exporter(config=config) | |
66 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
66 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
67 | assert resources is not None |
|
67 | assert resources is not None | |
68 | assert resources['cheese'] == 'real' |
|
68 | assert resources['cheese'] == 'real' | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def test_preprocessor_instance(self): |
|
71 | def test_preprocessor_instance(self): | |
72 | """ |
|
72 | """ | |
73 | Can a preprocessor be added to the preprocessors list by instance? |
|
73 | Can a preprocessor be added to the preprocessors list by instance? | |
74 | """ |
|
74 | """ | |
75 | config = Config({'Exporter': {'preprocessors': [CheesePreprocessor()]}}) |
|
75 | config = Config({'Exporter': {'preprocessors': [CheesePreprocessor()]}}) | |
76 | exporter = self._make_exporter(config=config) |
|
76 | exporter = self._make_exporter(config=config) | |
77 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
77 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
78 | assert resources is not None |
|
78 | assert resources is not None | |
79 | assert resources['cheese'] == 'real' |
|
79 | assert resources['cheese'] == 'real' | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | def test_preprocessor_dottedobjectname(self): |
|
82 | def test_preprocessor_dottedobjectname(self): | |
83 | """ |
|
83 | """ | |
84 | Can a preprocessor be added to the preprocessors list by dotted object name? |
|
84 | Can a preprocessor be added to the preprocessors list by dotted object name? | |
85 | """ |
|
85 | """ | |
86 | config = Config({'Exporter': {'preprocessors': ['IPython.nbconvert.exporters.tests.cheese.CheesePreprocessor']}}) |
|
86 | config = Config({'Exporter': {'preprocessors': ['jupyter_nbconvert.exporters.tests.cheese.CheesePreprocessor']}}) | |
87 | exporter = self._make_exporter(config=config) |
|
87 | exporter = self._make_exporter(config=config) | |
88 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
88 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
89 | assert resources is not None |
|
89 | assert resources is not None | |
90 | assert resources['cheese'] == 'real' |
|
90 | assert resources['cheese'] == 'real' | |
91 |
|
91 | |||
92 |
|
92 | |||
93 | def test_preprocessor_via_method(self): |
|
93 | def test_preprocessor_via_method(self): | |
94 | """ |
|
94 | """ | |
95 | Can a preprocessor be added via the Exporter convenience method? |
|
95 | Can a preprocessor be added via the Exporter convenience method? | |
96 | """ |
|
96 | """ | |
97 | exporter = self._make_exporter() |
|
97 | exporter = self._make_exporter() | |
98 | exporter.register_preprocessor(CheesePreprocessor, enabled=True) |
|
98 | exporter.register_preprocessor(CheesePreprocessor, enabled=True) | |
99 | (output, resources) = exporter.from_filename(self._get_notebook()) |
|
99 | (output, resources) = exporter.from_filename(self._get_notebook()) | |
100 | assert resources is not None |
|
100 | assert resources is not None | |
101 | assert resources['cheese'] == 'real' |
|
101 | assert resources['cheese'] == 'real' | |
102 |
|
102 | |||
103 |
|
103 | |||
104 | def _make_exporter(self, config=None): |
|
104 | def _make_exporter(self, config=None): | |
105 | # Create the exporter instance, make sure to set a template name since |
|
105 | # Create the exporter instance, make sure to set a template name since | |
106 | # the base TemplateExporter doesn't have a template associated with it. |
|
106 | # the base TemplateExporter doesn't have a template associated with it. | |
107 | exporter = TemplateExporter(config=config, template_file='python') |
|
107 | exporter = TemplateExporter(config=config, template_file='python') | |
108 | return exporter |
|
108 | return exporter |
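The tests above double as usage examples; a condensed, hedged version of the ExtractOutputPreprocessor case (example.ipynb is hypothetical):

    from IPython.config import Config
    from jupyter_nbconvert.exporters.templateexporter import TemplateExporter

    config = Config({'ExtractOutputPreprocessor': {'enabled': True}})
    exporter = TemplateExporter(config=config, template_file='python')
    output, resources = exporter.from_filename("example.ipynb")   # hypothetical notebook
    print(sorted(resources['outputs']))                           # names of extracted output files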
@@ -1,135 +1,135 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Module containing filter functions that allow code to be highlighted |
|
2 | Module containing filter functions that allow code to be highlighted | |
3 | from within Jinja templates. |
|
3 | from within Jinja templates. | |
4 | """ |
|
4 | """ | |
5 |
|
5 | |||
6 | # Copyright (c) IPython Development Team. |
|
6 | # Copyright (c) IPython Development Team. | |
7 | # Distributed under the terms of the Modified BSD License. |
|
7 | # Distributed under the terms of the Modified BSD License. | |
8 |
|
8 | |||
9 | # pygments must not be imported at the module level |
|
9 | # pygments must not be imported at the module level | |
10 | # because errors should be raised at runtime if it's actually needed, |
|
10 | # because errors should be raised at runtime if it's actually needed, | |
11 | # not import time, when it may not be needed. |
|
11 | # not import time, when it may not be needed. | |
12 |
|
12 | |||
13 | from IPython.nbconvert.utils.base import NbConvertBase |
|
13 | from jupyter_nbconvert.utils.base import NbConvertBase | |
14 | from warnings import warn |
|
14 | from warnings import warn | |
15 |
|
15 | |||
16 | MULTILINE_OUTPUTS = ['text', 'html', 'svg', 'latex', 'javascript', 'json'] |
|
16 | MULTILINE_OUTPUTS = ['text', 'html', 'svg', 'latex', 'javascript', 'json'] | |
17 |
|
17 | |||
18 | __all__ = [ |
|
18 | __all__ = [ | |
19 | 'Highlight2HTML', |
|
19 | 'Highlight2HTML', | |
20 | 'Highlight2Latex' |
|
20 | 'Highlight2Latex' | |
21 | ] |
|
21 | ] | |
22 |
|
22 | |||
23 | class Highlight2HTML(NbConvertBase): |
|
23 | class Highlight2HTML(NbConvertBase): | |
24 | def __init__(self, pygments_lexer=None, **kwargs): |
|
24 | def __init__(self, pygments_lexer=None, **kwargs): | |
25 | self.pygments_lexer = pygments_lexer or 'ipython3' |
|
25 | self.pygments_lexer = pygments_lexer or 'ipython3' | |
26 | super(Highlight2HTML, self).__init__(**kwargs) |
|
26 | super(Highlight2HTML, self).__init__(**kwargs) | |
27 |
|
27 | |||
28 | def _default_language_changed(self, name, old, new): |
|
28 | def _default_language_changed(self, name, old, new): | |
29 | warn('Setting default_language in config is deprecated, ' |
|
29 | warn('Setting default_language in config is deprecated, ' | |
30 | 'please use language_info metadata instead.') |
|
30 | 'please use language_info metadata instead.') | |
31 | self.pygments_lexer = new |
|
31 | self.pygments_lexer = new | |
32 |
|
32 | |||
33 | def __call__(self, source, language=None, metadata=None): |
|
33 | def __call__(self, source, language=None, metadata=None): | |
34 | """ |
|
34 | """ | |
35 | Return a syntax-highlighted version of the input source as html output. |
|
35 | Return a syntax-highlighted version of the input source as html output. | |
36 |
|
36 | |||
37 | Parameters |
|
37 | Parameters | |
38 | ---------- |
|
38 | ---------- | |
39 | source : str |
|
39 | source : str | |
40 | source of the cell to highlight |
|
40 | source of the cell to highlight | |
41 | language : str |
|
41 | language : str | |
42 | language to highlight the syntax of |
|
42 | language to highlight the syntax of | |
43 | metadata : NotebookNode cell metadata |
|
43 | metadata : NotebookNode cell metadata | |
44 | metadata of the cell to highlight |
|
44 | metadata of the cell to highlight | |
45 | """ |
|
45 | """ | |
46 | from pygments.formatters import HtmlFormatter |
|
46 | from pygments.formatters import HtmlFormatter | |
47 |
|
47 | |||
48 | if not language: |
|
48 | if not language: | |
49 | language=self.pygments_lexer |
|
49 | language=self.pygments_lexer | |
50 |
|
50 | |||
51 | return _pygments_highlight(source if len(source) > 0 else ' ', |
|
51 | return _pygments_highlight(source if len(source) > 0 else ' ', | |
52 | # needed to help post processors: |
|
52 | # needed to help post processors: | |
53 | HtmlFormatter(cssclass=" highlight hl-"+language), |
|
53 | HtmlFormatter(cssclass=" highlight hl-"+language), | |
54 | language, metadata) |
|
54 | language, metadata) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class Highlight2Latex(NbConvertBase): |
|
57 | class Highlight2Latex(NbConvertBase): | |
58 | def __init__(self, pygments_lexer=None, **kwargs): |
|
58 | def __init__(self, pygments_lexer=None, **kwargs): | |
59 | self.pygments_lexer = pygments_lexer or 'ipython3' |
|
59 | self.pygments_lexer = pygments_lexer or 'ipython3' | |
60 | super(Highlight2Latex, self).__init__(**kwargs) |
|
60 | super(Highlight2Latex, self).__init__(**kwargs) | |
61 |
|
61 | |||
62 | def _default_language_changed(self, name, old, new): |
|
62 | def _default_language_changed(self, name, old, new): | |
63 | warn('Setting default_language in config is deprecated, ' |
|
63 | warn('Setting default_language in config is deprecated, ' | |
64 | 'please use language_info metadata instead.') |
|
64 | 'please use language_info metadata instead.') | |
65 | self.pygments_lexer = new |
|
65 | self.pygments_lexer = new | |
66 |
|
66 | |||
67 | def __call__(self, source, language=None, metadata=None, strip_verbatim=False): |
|
67 | def __call__(self, source, language=None, metadata=None, strip_verbatim=False): | |
68 | """ |
|
68 | """ | |
69 | Return a syntax-highlighted version of the input source as latex output. |
|
69 | Return a syntax-highlighted version of the input source as latex output. | |
70 |
|
70 | |||
71 | Parameters |
|
71 | Parameters | |
72 | ---------- |
|
72 | ---------- | |
73 | source : str |
|
73 | source : str | |
74 | source of the cell to highlight |
|
74 | source of the cell to highlight | |
75 | language : str |
|
75 | language : str | |
76 | language to highlight the syntax of |
|
76 | language to highlight the syntax of | |
77 | metadata : NotebookNode cell metadata |
|
77 | metadata : NotebookNode cell metadata | |
78 | metadata of the cell to highlight |
|
78 | metadata of the cell to highlight | |
79 | strip_verbatim : bool |
|
79 | strip_verbatim : bool | |
80 | remove the Verbatim environment that pygments provides by default |
|
80 | remove the Verbatim environment that pygments provides by default | |
81 | """ |
|
81 | """ | |
82 | from pygments.formatters import LatexFormatter |
|
82 | from pygments.formatters import LatexFormatter | |
83 | if not language: |
|
83 | if not language: | |
84 | language=self.pygments_lexer |
|
84 | language=self.pygments_lexer | |
85 |
|
85 | |||
86 | latex = _pygments_highlight(source, LatexFormatter(), language, metadata) |
|
86 | latex = _pygments_highlight(source, LatexFormatter(), language, metadata) | |
87 | if strip_verbatim: |
|
87 | if strip_verbatim: | |
88 | latex = latex.replace(r'\begin{Verbatim}[commandchars=\\\{\}]' + '\n', '') |
|
88 | latex = latex.replace(r'\begin{Verbatim}[commandchars=\\\{\}]' + '\n', '') | |
89 | return latex.replace('\n\\end{Verbatim}\n', '') |
|
89 | return latex.replace('\n\\end{Verbatim}\n', '') | |
90 | else: |
|
90 | else: | |
91 | return latex |
|
91 | return latex | |
92 |
|
92 | |||
93 |
|
93 | |||
94 |
|
94 | |||
95 | def _pygments_highlight(source, output_formatter, language='ipython', metadata=None): |
|
95 | def _pygments_highlight(source, output_formatter, language='ipython', metadata=None): | |
96 | """ |
|
96 | """ | |
97 | Return a syntax-highlighted version of the input source |
|
97 | Return a syntax-highlighted version of the input source | |
98 |
|
98 | |||
99 | Parameters |
|
99 | Parameters | |
100 | ---------- |
|
100 | ---------- | |
101 | source : str |
|
101 | source : str | |
102 | source of the cell to highlight |
|
102 | source of the cell to highlight | |
103 | output_formatter : Pygments formatter |
|
103 | output_formatter : Pygments formatter | |
104 | language : str |
|
104 | language : str | |
105 | language to highlight the syntax of |
|
105 | language to highlight the syntax of | |
106 | metadata : NotebookNode cell metadata |
|
106 | metadata : NotebookNode cell metadata | |
107 | metadata of the cell to highlight |
|
107 | metadata of the cell to highlight | |
108 | """ |
|
108 | """ | |
109 | from pygments import highlight |
|
109 | from pygments import highlight | |
110 | from pygments.lexers import get_lexer_by_name |
|
110 | from pygments.lexers import get_lexer_by_name | |
111 | from pygments.util import ClassNotFound |
|
111 | from pygments.util import ClassNotFound | |
112 | from IPython.lib.lexers import IPythonLexer, IPython3Lexer |
|
112 | from IPython.lib.lexers import IPythonLexer, IPython3Lexer | |
113 |
|
113 | |||
114 | # If the cell uses a magic extension language, |
|
114 | # If the cell uses a magic extension language, | |
115 | # use the magic language instead. |
|
115 | # use the magic language instead. | |
116 | if language.startswith('ipython') \ |
|
116 | if language.startswith('ipython') \ | |
117 | and metadata \ |
|
117 | and metadata \ | |
118 | and 'magics_language' in metadata: |
|
118 | and 'magics_language' in metadata: | |
119 |
|
119 | |||
120 | language = metadata['magics_language'] |
|
120 | language = metadata['magics_language'] | |
121 |
|
121 | |||
122 | if language == 'ipython2': |
|
122 | if language == 'ipython2': | |
123 | lexer = IPythonLexer() |
|
123 | lexer = IPythonLexer() | |
124 | elif language == 'ipython3': |
|
124 | elif language == 'ipython3': | |
125 | lexer = IPython3Lexer() |
|
125 | lexer = IPython3Lexer() | |
126 | else: |
|
126 | else: | |
127 | try: |
|
127 | try: | |
128 | lexer = get_lexer_by_name(language, stripall=True) |
|
128 | lexer = get_lexer_by_name(language, stripall=True) | |
129 | except ClassNotFound: |
|
129 | except ClassNotFound: | |
130 | warn("No lexer found for language %r. Treating as plain text." % language) |
|
130 | warn("No lexer found for language %r. Treating as plain text." % language) | |
131 | from pygments.lexers.special import TextLexer |
|
131 | from pygments.lexers.special import TextLexer | |
132 | lexer = TextLexer() |
|
132 | lexer = TextLexer() | |
133 |
|
133 | |||
134 |
|
134 | |||
135 | return highlight(source, lexer, output_formatter) |
|
135 | return highlight(source, lexer, output_formatter) |
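A hedged sketch of calling the Highlight2HTML filter above outside a template (assumed module path after the rename: jupyter_nbconvert.filters.highlight; pygments must be installed):

    from jupyter_nbconvert.filters.highlight import Highlight2HTML

    to_html = Highlight2HTML(pygments_lexer='ipython3')
    html = to_html("print('hello')")     # no language given, so pygments_lexer is used
    # the result is wrapped in <div class=" highlight hl-ipython3"> per the cssclass above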
@@ -1,140 +1,140 b'' | |||||
1 | """Markdown filters |
|
1 | """Markdown filters | |
2 |
|
2 | |||
3 | This file contains a collection of utility filters for dealing with |
|
3 | This file contains a collection of utility filters for dealing with | |
4 | markdown within Jinja templates. |
|
4 | markdown within Jinja templates. | |
5 | """ |
|
5 | """ | |
6 | # Copyright (c) IPython Development Team. |
|
6 | # Copyright (c) IPython Development Team. | |
7 | # Distributed under the terms of the Modified BSD License. |
|
7 | # Distributed under the terms of the Modified BSD License. | |
8 |
|
8 | |||
9 | from __future__ import print_function |
|
9 | from __future__ import print_function | |
10 |
|
10 | |||
11 | import os |
|
11 | import os | |
12 | import subprocess |
|
12 | import subprocess | |
13 | from io import TextIOWrapper, BytesIO |
|
13 | from io import TextIOWrapper, BytesIO | |
14 |
|
14 | |||
15 | try: |
|
15 | try: | |
16 | from .markdown_mistune import markdown2html_mistune |
|
16 | from .markdown_mistune import markdown2html_mistune | |
17 | except ImportError as e: |
|
17 | except ImportError as e: | |
18 | # store in variable for Python 3 |
|
18 | # store in variable for Python 3 | |
19 | _mistune_import_error = e |
|
19 | _mistune_import_error = e | |
20 | def markdown2html_mistune(source): |
|
20 | def markdown2html_mistune(source): | |
21 | """mistune is unavailable, raise ImportError""" |
|
21 | """mistune is unavailable, raise ImportError""" | |
22 | raise ImportError("markdown2html requires mistune: %s" % _mistune_import_error) |
|
22 | raise ImportError("markdown2html requires mistune: %s" % _mistune_import_error) | |
23 |
|
23 | |||
24 | from IPython.nbconvert.utils.pandoc import pandoc |
|
24 | from jupyter_nbconvert.utils.pandoc import pandoc | |
25 | from IPython.nbconvert.utils.exceptions import ConversionException |
|
25 | from jupyter_nbconvert.utils.exceptions import ConversionException | |
26 | from IPython.utils.process import get_output_error_code |
|
26 | from IPython.utils.process import get_output_error_code | |
27 | from IPython.utils.py3compat import cast_bytes |
|
27 | from IPython.utils.py3compat import cast_bytes | |
28 | from IPython.utils.version import check_version |
|
28 | from IPython.utils.version import check_version | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | marked = os.path.join(os.path.dirname(__file__), "marked.js") |
|
31 | marked = os.path.join(os.path.dirname(__file__), "marked.js") | |
32 | _node = None |
|
32 | _node = None | |
33 |
|
33 | |||
34 | __all__ = [ |
|
34 | __all__ = [ | |
35 | 'markdown2html', |
|
35 | 'markdown2html', | |
36 | 'markdown2html_pandoc', |
|
36 | 'markdown2html_pandoc', | |
37 | 'markdown2html_marked', |
|
37 | 'markdown2html_marked', | |
38 | 'markdown2html_mistune', |
|
38 | 'markdown2html_mistune', | |
39 | 'markdown2latex', |
|
39 | 'markdown2latex', | |
40 | 'markdown2rst', |
|
40 | 'markdown2rst', | |
41 | ] |
|
41 | ] | |
42 |
|
42 | |||
43 | class NodeJSMissing(ConversionException): |
|
43 | class NodeJSMissing(ConversionException): | |
44 | """Exception raised when node.js is missing.""" |
|
44 | """Exception raised when node.js is missing.""" | |
45 | pass |
|
45 | pass | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | def markdown2latex(source, markup='markdown', extra_args=None): |
|
48 | def markdown2latex(source, markup='markdown', extra_args=None): | |
49 | """Convert a markdown string to LaTeX via pandoc. |
|
49 | """Convert a markdown string to LaTeX via pandoc. | |
50 |
|
50 | |||
51 | This function will raise an error if pandoc is not installed. |
|
51 | This function will raise an error if pandoc is not installed. | |
52 | Any error messages generated by pandoc are printed to stderr. |
|
52 | Any error messages generated by pandoc are printed to stderr. | |
53 |
|
53 | |||
54 | Parameters |
|
54 | Parameters | |
55 | ---------- |
|
55 | ---------- | |
56 | source : string |
|
56 | source : string | |
57 | Input string, assumed to be valid markdown. |
|
57 | Input string, assumed to be valid markdown. | |
58 | markup : string |
|
58 | markup : string | |
59 | Markup used by pandoc's reader |
|
59 | Markup used by pandoc's reader | |
60 | default : pandoc extended markdown |
|
60 | default : pandoc extended markdown | |
61 | (see http://johnmacfarlane.net/pandoc/README.html#pandocs-markdown) |
|
61 | (see http://johnmacfarlane.net/pandoc/README.html#pandocs-markdown) | |
62 |
|
62 | |||
63 | Returns |
|
63 | Returns | |
64 | ------- |
|
64 | ------- | |
65 | out : string |
|
65 | out : string | |
66 | Output as returned by pandoc. |
|
66 | Output as returned by pandoc. | |
67 | """ |
|
67 | """ | |
68 | return pandoc(source, markup, 'latex', extra_args=extra_args) |
|
68 | return pandoc(source, markup, 'latex', extra_args=extra_args) | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def markdown2html_pandoc(source, extra_args=None): |
|
71 | def markdown2html_pandoc(source, extra_args=None): | |
72 | """Convert a markdown string to HTML via pandoc""" |
|
72 | """Convert a markdown string to HTML via pandoc""" | |
73 | extra_args = extra_args or ['--mathjax'] |
|
73 | extra_args = extra_args or ['--mathjax'] | |
74 | return pandoc(source, 'markdown', 'html', extra_args=extra_args) |
|
74 | return pandoc(source, 'markdown', 'html', extra_args=extra_args) | |
75 |
|
75 | |||
76 |
|
76 | |||
77 | def _find_nodejs(): |
|
77 | def _find_nodejs(): | |
78 | global _node |
|
78 | global _node | |
79 | if _node is None: |
|
79 | if _node is None: | |
80 | # prefer md2html via marked if node.js >= 0.9.12 is available |
|
80 | # prefer md2html via marked if node.js >= 0.9.12 is available | |
81 | # node is called nodejs on debian, so try that first |
|
81 | # node is called nodejs on debian, so try that first | |
82 | _node = 'nodejs' |
|
82 | _node = 'nodejs' | |
83 | if not _verify_node(_node): |
|
83 | if not _verify_node(_node): | |
84 | _node = 'node' |
|
84 | _node = 'node' | |
85 | return _node |
|
85 | return _node | |
86 |
|
86 | |||
87 | def markdown2html_marked(source, encoding='utf-8'): |
|
87 | def markdown2html_marked(source, encoding='utf-8'): | |
88 | """Convert a markdown string to HTML via marked""" |
|
88 | """Convert a markdown string to HTML via marked""" | |
89 | command = [_find_nodejs(), marked] |
|
89 | command = [_find_nodejs(), marked] | |
90 | try: |
|
90 | try: | |
91 | p = subprocess.Popen(command, |
|
91 | p = subprocess.Popen(command, | |
92 | stdin=subprocess.PIPE, stdout=subprocess.PIPE |
|
92 | stdin=subprocess.PIPE, stdout=subprocess.PIPE | |
93 | ) |
|
93 | ) | |
94 | except OSError as e: |
|
94 | except OSError as e: | |
95 | raise NodeJSMissing( |
|
95 | raise NodeJSMissing( | |
96 | "The command '%s' returned an error: %s.\n" % (" ".join(command), e) + |
|
96 | "The command '%s' returned an error: %s.\n" % (" ".join(command), e) + | |
97 | "Please check that Node.js is installed." |
|
97 | "Please check that Node.js is installed." | |
98 | ) |
|
98 | ) | |
99 | out, _ = p.communicate(cast_bytes(source, encoding)) |
|
99 | out, _ = p.communicate(cast_bytes(source, encoding)) | |
100 | out = TextIOWrapper(BytesIO(out), encoding, 'replace').read() |
|
100 | out = TextIOWrapper(BytesIO(out), encoding, 'replace').read() | |
101 | return out.rstrip('\n') |
|
101 | return out.rstrip('\n') | |
102 |
|
102 | |||
103 | # The mistune renderer is the default, because it's simple to depend on it |
|
103 | # The mistune renderer is the default, because it's simple to depend on it | |
104 | markdown2html = markdown2html_mistune |
|
104 | markdown2html = markdown2html_mistune | |
105 |
|
105 | |||
106 | def markdown2rst(source, extra_args=None): |
|
106 | def markdown2rst(source, extra_args=None): | |
107 | """Convert a markdown string to ReST via pandoc. |
|
107 | """Convert a markdown string to ReST via pandoc. | |
108 |
|
108 | |||
109 | This function will raise an error if pandoc is not installed. |
|
109 | This function will raise an error if pandoc is not installed. | |
110 | Any error messages generated by pandoc are printed to stderr. |
|
110 | Any error messages generated by pandoc are printed to stderr. | |
111 |
|
111 | |||
112 | Parameters |
|
112 | Parameters | |
113 | ---------- |
|
113 | ---------- | |
114 | source : string |
|
114 | source : string | |
115 | Input string, assumed to be valid markdown. |
|
115 | Input string, assumed to be valid markdown. | |
116 |
|
116 | |||
117 | Returns |
|
117 | Returns | |
118 | ------- |
|
118 | ------- | |
119 | out : string |
|
119 | out : string | |
120 | Output as returned by pandoc. |
|
120 | Output as returned by pandoc. | |
121 | """ |
|
121 | """ | |
122 | return pandoc(source, 'markdown', 'rst', extra_args=extra_args) |
|
122 | return pandoc(source, 'markdown', 'rst', extra_args=extra_args) | |
123 |
|
123 | |||
124 | def _verify_node(cmd): |
|
124 | def _verify_node(cmd): | |
125 | """Verify that the node command exists and is at least the minimum supported |
|
125 | """Verify that the node command exists and is at least the minimum supported | |
126 | version of node. |
|
126 | version of node. | |
127 |
|
127 | |||
128 | Parameters |
|
128 | Parameters | |
129 | ---------- |
|
129 | ---------- | |
130 | cmd : string |
|
130 | cmd : string | |
131 | Node command to verify (i.e. 'node').""" |
|
131 | Node command to verify (i.e. 'node').""" | |
132 | try: |
|
132 | try: | |
133 | out, err, return_code = get_output_error_code([cmd, '--version']) |
|
133 | out, err, return_code = get_output_error_code([cmd, '--version']) | |
134 | except OSError: |
|
134 | except OSError: | |
135 | # Command not found |
|
135 | # Command not found | |
136 | return False |
|
136 | return False | |
137 | if return_code: |
|
137 | if return_code: | |
138 | # Command error |
|
138 | # Command error | |
139 | return False |
|
139 | return False | |
140 | return check_version(out.lstrip('v'), '0.9.12') |
|
140 | return check_version(out.lstrip('v'), '0.9.12') |
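For readers following the node-detection logic above, a hand-rolled equivalent of the version probe, using only the standard library, looks roughly like this:

import subprocess

def node_version(cmd='node'):
    """Return the Node.js version string, or None if the command is unusable."""
    try:
        out = subprocess.check_output([cmd, '--version'])
    except (OSError, subprocess.CalledProcessError):
        return None  # missing command or non-zero exit status
    return out.decode('ascii', 'replace').strip().lstrip('v')

print(node_version())  # e.g. '0.10.25', or None when Node.js is absent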
@@ -1,122 +1,122 b'' | |||||
1 | """Markdown filters with mistune |
|
1 | """Markdown filters with mistune | |
2 |
|
2 | |||
3 | Used from markdown.py |
|
3 | Used from markdown.py | |
4 | """ |
|
4 | """ | |
5 | # Copyright (c) IPython Development Team. |
|
5 | # Copyright (c) IPython Development Team. | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 |
|
7 | |||
8 | from __future__ import print_function |
|
8 | from __future__ import print_function | |
9 |
|
9 | |||
10 | import re |
|
10 | import re | |
11 |
|
11 | |||
12 | import mistune |
|
12 | import mistune | |
13 |
|
13 | |||
14 | from pygments import highlight |
|
14 | from pygments import highlight | |
15 | from pygments.lexers import get_lexer_by_name |
|
15 | from pygments.lexers import get_lexer_by_name | |
16 | from pygments.formatters import HtmlFormatter |
|
16 | from pygments.formatters import HtmlFormatter | |
17 | from pygments.util import ClassNotFound |
|
17 | from pygments.util import ClassNotFound | |
18 |
|
18 | |||
19 | from IPython.nbconvert.filters.strings import add_anchor |
|
19 | from jupyter_nbconvert.filters.strings import add_anchor | |
20 | from IPython.nbconvert.utils.exceptions import ConversionException |
|
20 | from jupyter_nbconvert.utils.exceptions import ConversionException | |
21 | from IPython.utils.decorators import undoc |
|
21 | from IPython.utils.decorators import undoc | |
22 |
|
22 | |||
23 |
|
23 | |||
24 | @undoc |
|
24 | @undoc | |
25 | class MathBlockGrammar(mistune.BlockGrammar): |
|
25 | class MathBlockGrammar(mistune.BlockGrammar): | |
26 | block_math = re.compile(r"^\$\$(.*?)\$\$", re.DOTALL) |
|
26 | block_math = re.compile(r"^\$\$(.*?)\$\$", re.DOTALL) | |
27 | latex_environment = re.compile(r"^\\begin\{([a-z]*\*?)\}(.*?)\\end\{\1\}", |
|
27 | latex_environment = re.compile(r"^\\begin\{([a-z]*\*?)\}(.*?)\\end\{\1\}", | |
28 | re.DOTALL) |
|
28 | re.DOTALL) | |
29 |
|
29 | |||
30 | @undoc |
|
30 | @undoc | |
31 | class MathBlockLexer(mistune.BlockLexer): |
|
31 | class MathBlockLexer(mistune.BlockLexer): | |
32 | default_rules = ['block_math', 'latex_environment'] + mistune.BlockLexer.default_rules |
|
32 | default_rules = ['block_math', 'latex_environment'] + mistune.BlockLexer.default_rules | |
33 |
|
33 | |||
34 | def __init__(self, rules=None, **kwargs): |
|
34 | def __init__(self, rules=None, **kwargs): | |
35 | if rules is None: |
|
35 | if rules is None: | |
36 | rules = MathBlockGrammar() |
|
36 | rules = MathBlockGrammar() | |
37 | super(MathBlockLexer, self).__init__(rules, **kwargs) |
|
37 | super(MathBlockLexer, self).__init__(rules, **kwargs) | |
38 |
|
38 | |||
39 | def parse_block_math(self, m): |
|
39 | def parse_block_math(self, m): | |
40 | """Parse a $$math$$ block""" |
|
40 | """Parse a $$math$$ block""" | |
41 | self.tokens.append({ |
|
41 | self.tokens.append({ | |
42 | 'type': 'block_math', |
|
42 | 'type': 'block_math', | |
43 | 'text': m.group(1) |
|
43 | 'text': m.group(1) | |
44 | }) |
|
44 | }) | |
45 |
|
45 | |||
46 | def parse_latex_environment(self, m): |
|
46 | def parse_latex_environment(self, m): | |
47 | self.tokens.append({ |
|
47 | self.tokens.append({ | |
48 | 'type': 'latex_environment', |
|
48 | 'type': 'latex_environment', | |
49 | 'name': m.group(1), |
|
49 | 'name': m.group(1), | |
50 | 'text': m.group(2) |
|
50 | 'text': m.group(2) | |
51 | }) |
|
51 | }) | |
52 |
|
52 | |||
53 | @undoc |
|
53 | @undoc | |
54 | class MathInlineGrammar(mistune.InlineGrammar): |
|
54 | class MathInlineGrammar(mistune.InlineGrammar): | |
55 | math = re.compile(r"^\$(.+?)\$") |
|
55 | math = re.compile(r"^\$(.+?)\$") | |
56 | block_math = re.compile(r"^\$\$(.+?)\$\$", re.DOTALL) |
|
56 | block_math = re.compile(r"^\$\$(.+?)\$\$", re.DOTALL) | |
57 | text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~$]|https?://| {2,}\n|$)') |
|
57 | text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~$]|https?://| {2,}\n|$)') | |
58 |
|
58 | |||
59 | @undoc |
|
59 | @undoc | |
60 | class MathInlineLexer(mistune.InlineLexer): |
|
60 | class MathInlineLexer(mistune.InlineLexer): | |
61 | default_rules = ['math', 'block_math'] + mistune.InlineLexer.default_rules |
|
61 | default_rules = ['math', 'block_math'] + mistune.InlineLexer.default_rules | |
62 |
|
62 | |||
63 | def __init__(self, renderer, rules=None, **kwargs): |
|
63 | def __init__(self, renderer, rules=None, **kwargs): | |
64 | if rules is None: |
|
64 | if rules is None: | |
65 | rules = MathInlineGrammar() |
|
65 | rules = MathInlineGrammar() | |
66 | super(MathInlineLexer, self).__init__(renderer, rules, **kwargs) |
|
66 | super(MathInlineLexer, self).__init__(renderer, rules, **kwargs) | |
67 |
|
67 | |||
68 | def output_math(self, m): |
|
68 | def output_math(self, m): | |
69 | return self.renderer.inline_math(m.group(1)) |
|
69 | return self.renderer.inline_math(m.group(1)) | |
70 |
|
70 | |||
71 | def output_block_math(self, m): |
|
71 | def output_block_math(self, m): | |
72 | return self.renderer.block_math(m.group(1)) |
|
72 | return self.renderer.block_math(m.group(1)) | |
73 |
|
73 | |||
74 | @undoc |
|
74 | @undoc | |
75 | class MarkdownWithMath(mistune.Markdown): |
|
75 | class MarkdownWithMath(mistune.Markdown): | |
76 | def __init__(self, renderer, **kwargs): |
|
76 | def __init__(self, renderer, **kwargs): | |
77 | if 'inline' not in kwargs: |
|
77 | if 'inline' not in kwargs: | |
78 | kwargs['inline'] = MathInlineLexer |
|
78 | kwargs['inline'] = MathInlineLexer | |
79 | if 'block' not in kwargs: |
|
79 | if 'block' not in kwargs: | |
80 | kwargs['block'] = MathBlockLexer |
|
80 | kwargs['block'] = MathBlockLexer | |
81 | super(MarkdownWithMath, self).__init__(renderer, **kwargs) |
|
81 | super(MarkdownWithMath, self).__init__(renderer, **kwargs) | |
82 |
|
82 | |||
83 | def output_block_math(self): |
|
83 | def output_block_math(self): | |
84 | return self.renderer.block_math(self.token['text']) |
|
84 | return self.renderer.block_math(self.token['text']) | |
85 |
|
85 | |||
86 | def output_latex_environment(self): |
|
86 | def output_latex_environment(self): | |
87 | return self.renderer.latex_environment(self.token['name'], self.token['text']) |
|
87 | return self.renderer.latex_environment(self.token['name'], self.token['text']) | |
88 |
|
88 | |||
89 | @undoc |
|
89 | @undoc | |
90 | class IPythonRenderer(mistune.Renderer): |
|
90 | class IPythonRenderer(mistune.Renderer): | |
91 | def block_code(self, code, lang): |
|
91 | def block_code(self, code, lang): | |
92 | if lang: |
|
92 | if lang: | |
93 | try: |
|
93 | try: | |
94 | lexer = get_lexer_by_name(lang, stripall=True) |
|
94 | lexer = get_lexer_by_name(lang, stripall=True) | |
95 | except ClassNotFound: |
|
95 | except ClassNotFound: | |
96 | code = lang + '\n' + code |
|
96 | code = lang + '\n' + code | |
97 | lang = None |
|
97 | lang = None | |
98 |
|
98 | |||
99 | if not lang: |
|
99 | if not lang: | |
100 | return '\n<pre><code>%s</code></pre>\n' % \ |
|
100 | return '\n<pre><code>%s</code></pre>\n' % \ | |
101 | mistune.escape(code) |
|
101 | mistune.escape(code) | |
102 |
|
102 | |||
103 | formatter = HtmlFormatter() |
|
103 | formatter = HtmlFormatter() | |
104 | return highlight(code, lexer, formatter) |
|
104 | return highlight(code, lexer, formatter) | |
105 |
|
105 | |||
106 | def header(self, text, level, raw=None): |
|
106 | def header(self, text, level, raw=None): | |
107 | html = super(IPythonRenderer, self).header(text, level, raw=raw) |
|
107 | html = super(IPythonRenderer, self).header(text, level, raw=raw) | |
108 | return add_anchor(html) |
|
108 | return add_anchor(html) | |
109 |
|
109 | |||
110 | # Pass math through unaltered - mathjax does the rendering in the browser |
|
110 | # Pass math through unaltered - mathjax does the rendering in the browser | |
111 | def block_math(self, text): |
|
111 | def block_math(self, text): | |
112 | return '$$%s$$' % text |
|
112 | return '$$%s$$' % text | |
113 |
|
113 | |||
114 | def latex_environment(self, name, text): |
|
114 | def latex_environment(self, name, text): | |
115 | return r'\begin{%s}%s\end{%s}' % (name, text, name) |
|
115 | return r'\begin{%s}%s\end{%s}' % (name, text, name) | |
116 |
|
116 | |||
117 | def inline_math(self, text): |
|
117 | def inline_math(self, text): | |
118 | return '$%s$' % text |
|
118 | return '$%s$' % text | |
119 |
|
119 | |||
120 | def markdown2html_mistune(source): |
|
120 | def markdown2html_mistune(source): | |
121 | """Convert a markdown string to HTML using mistune""" |
|
121 | """Convert a markdown string to HTML using mistune""" | |
122 | return MarkdownWithMath(renderer=IPythonRenderer()).render(source) |
|
122 | return MarkdownWithMath(renderer=IPythonRenderer()).render(source) |
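A usage sketch of the classes above, assuming the module is importable as jupyter_nbconvert.filters.markdown_mistune: math is passed through untouched so MathJax can render it in the browser, while fenced code with a language name is highlighted by Pygments.

from jupyter_nbconvert.filters.markdown_mistune import (
    MarkdownWithMath, IPythonRenderer)

source = "Euler: $e^{i\\pi} + 1 = 0$\n\n```python\nprint('hi')\n```\n"
html = MarkdownWithMath(renderer=IPythonRenderer()).render(source)
# The '$...$' span survives verbatim; the code block comes back wrapped in
# Pygments '<div class="highlight">...' markup.
print(html)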
@@ -1,407 +1,407 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """NbConvert is a utility for conversion of .ipynb files. |
|
2 | """NbConvert is a utility for conversion of .ipynb files. | |
3 |
|
3 | |||
4 | Command-line interface for the NbConvert conversion utility. |
|
4 | Command-line interface for the NbConvert conversion utility. | |
5 | """ |
|
5 | """ | |
6 |
|
6 | |||
7 | # Copyright (c) IPython Development Team. |
|
7 | # Copyright (c) IPython Development Team. | |
8 | # Distributed under the terms of the Modified BSD License. |
|
8 | # Distributed under the terms of the Modified BSD License. | |
9 |
|
9 | |||
10 | from __future__ import print_function |
|
10 | from __future__ import print_function | |
11 |
|
11 | |||
12 | import logging |
|
12 | import logging | |
13 | import sys |
|
13 | import sys | |
14 | import os |
|
14 | import os | |
15 | import glob |
|
15 | import glob | |
16 |
|
16 | |||
17 | from IPython.core.application import BaseIPythonApplication, base_aliases, base_flags |
|
17 | from IPython.core.application import BaseIPythonApplication, base_aliases, base_flags | |
18 | from IPython.core.profiledir import ProfileDir |
|
18 | from IPython.core.profiledir import ProfileDir | |
19 | from IPython.config import catch_config_error, Configurable |
|
19 | from IPython.config import catch_config_error, Configurable | |
20 | from IPython.utils.traitlets import ( |
|
20 | from IPython.utils.traitlets import ( | |
21 | Unicode, List, Instance, DottedObjectName, Type, CaselessStrEnum, Bool, |
|
21 | Unicode, List, Instance, DottedObjectName, Type, CaselessStrEnum, Bool, | |
22 | ) |
|
22 | ) | |
23 | from IPython.utils.importstring import import_item |
|
23 | from IPython.utils.importstring import import_item | |
24 |
|
24 | |||
25 | from .exporters.export import get_export_names, exporter_map |
|
25 | from .exporters.export import get_export_names, exporter_map | |
26 | from IPython.nbconvert import exporters, preprocessors, writers, postprocessors |
|
26 | from jupyter_nbconvert import exporters, preprocessors, writers, postprocessors | |
27 | from .utils.base import NbConvertBase |
|
27 | from .utils.base import NbConvertBase | |
28 | from .utils.exceptions import ConversionException |
|
28 | from .utils.exceptions import ConversionException | |
29 |
|
29 | |||
30 | #----------------------------------------------------------------------------- |
|
30 | #----------------------------------------------------------------------------- | |
31 | #Classes and functions |
|
31 | #Classes and functions | |
32 | #----------------------------------------------------------------------------- |
|
32 | #----------------------------------------------------------------------------- | |
33 |
|
33 | |||
34 | class DottedOrNone(DottedObjectName): |
|
34 | class DottedOrNone(DottedObjectName): | |
35 | """ |
|
35 | """ | |
36 | A string holding a valid dotted object name in Python, such as A.b3._c |
|
36 | A string holding a valid dotted object name in Python, such as A.b3._c | |
37 | Also allows for None type.""" |
|
37 | Also allows for None type.""" | |
38 |
|
38 | |||
39 | default_value = u'' |
|
39 | default_value = u'' | |
40 |
|
40 | |||
41 | def validate(self, obj, value): |
|
41 | def validate(self, obj, value): | |
42 | if value is not None and len(value) > 0: |
|
42 | if value is not None and len(value) > 0: | |
43 | return super(DottedOrNone, self).validate(obj, value) |
|
43 | return super(DottedOrNone, self).validate(obj, value) | |
44 | else: |
|
44 | else: | |
45 | return value |
|
45 | return value | |
46 |
|
46 | |||
47 | nbconvert_aliases = {} |
|
47 | nbconvert_aliases = {} | |
48 | nbconvert_aliases.update(base_aliases) |
|
48 | nbconvert_aliases.update(base_aliases) | |
49 | nbconvert_aliases.update({ |
|
49 | nbconvert_aliases.update({ | |
50 | 'to' : 'NbConvertApp.export_format', |
|
50 | 'to' : 'NbConvertApp.export_format', | |
51 | 'template' : 'TemplateExporter.template_file', |
|
51 | 'template' : 'TemplateExporter.template_file', | |
52 | 'writer' : 'NbConvertApp.writer_class', |
|
52 | 'writer' : 'NbConvertApp.writer_class', | |
53 | 'post': 'NbConvertApp.postprocessor_class', |
|
53 | 'post': 'NbConvertApp.postprocessor_class', | |
54 | 'output': 'NbConvertApp.output_base', |
|
54 | 'output': 'NbConvertApp.output_base', | |
55 | 'reveal-prefix': 'RevealHelpPreprocessor.url_prefix', |
|
55 | 'reveal-prefix': 'RevealHelpPreprocessor.url_prefix', | |
56 | 'nbformat': 'NotebookExporter.nbformat_version', |
|
56 | 'nbformat': 'NotebookExporter.nbformat_version', | |
57 | }) |
|
57 | }) | |
58 |
|
58 | |||
59 | nbconvert_flags = {} |
|
59 | nbconvert_flags = {} | |
60 | nbconvert_flags.update(base_flags) |
|
60 | nbconvert_flags.update(base_flags) | |
61 | nbconvert_flags.update({ |
|
61 | nbconvert_flags.update({ | |
62 | 'execute' : ( |
|
62 | 'execute' : ( | |
63 | {'ExecutePreprocessor' : {'enabled' : True}}, |
|
63 | {'ExecutePreprocessor' : {'enabled' : True}}, | |
64 | "Execute the notebook prior to export." |
|
64 | "Execute the notebook prior to export." | |
65 | ), |
|
65 | ), | |
66 | 'stdout' : ( |
|
66 | 'stdout' : ( | |
67 | {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, |
|
67 | {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, | |
68 | "Write notebook output to stdout instead of files." |
|
68 | "Write notebook output to stdout instead of files." | |
69 | ), |
|
69 | ), | |
70 | 'inplace' : ( |
|
70 | 'inplace' : ( | |
71 | { |
|
71 | { | |
72 | 'NbConvertApp' : {'use_output_suffix' : False}, |
|
72 | 'NbConvertApp' : {'use_output_suffix' : False}, | |
73 | 'FilesWriter': {'build_directory': ''} |
|
73 | 'FilesWriter': {'build_directory': ''} | |
74 | }, |
|
74 | }, | |
75 | """Run nbconvert in place, overwriting the existing notebook (only |
|
75 | """Run nbconvert in place, overwriting the existing notebook (only | |
76 | relevant when converting to notebook format)""" |
|
76 | relevant when converting to notebook format)""" | |
77 | ) |
|
77 | ) | |
78 | }) |
|
78 | }) | |
79 |
|
79 | |||
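To make the alias and flag tables above concrete: a command line such as `ipython nbconvert --to latex --template article notebook.ipynb` sets the same traits one would put in a config file, roughly:

# mycfg.py, loaded with: ipython nbconvert --config mycfg.py
# (get_config() is injected by the IPython configuration loader)
c = get_config()
c.NbConvertApp.export_format = 'latex'         # the --to alias
c.TemplateExporter.template_file = 'article'   # the --template alias
c.NbConvertApp.notebooks = ['notebook.ipynb']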
80 |
|
80 | |||
81 | class NbConvertApp(BaseIPythonApplication): |
|
81 | class NbConvertApp(BaseIPythonApplication): | |
82 | """Application used to convert from notebook file type (``*.ipynb``)""" |
|
82 | """Application used to convert from notebook file type (``*.ipynb``)""" | |
83 |
|
83 | |||
84 | name = 'ipython-nbconvert' |
|
84 | name = 'ipython-nbconvert' | |
85 | aliases = nbconvert_aliases |
|
85 | aliases = nbconvert_aliases | |
86 | flags = nbconvert_flags |
|
86 | flags = nbconvert_flags | |
87 |
|
87 | |||
88 | def _log_level_default(self): |
|
88 | def _log_level_default(self): | |
89 | return logging.INFO |
|
89 | return logging.INFO | |
90 |
|
90 | |||
91 | def _classes_default(self): |
|
91 | def _classes_default(self): | |
92 | classes = [NbConvertBase, ProfileDir] |
|
92 | classes = [NbConvertBase, ProfileDir] | |
93 | for pkg in (exporters, preprocessors, writers, postprocessors): |
|
93 | for pkg in (exporters, preprocessors, writers, postprocessors): | |
94 | for name in dir(pkg): |
|
94 | for name in dir(pkg): | |
95 | cls = getattr(pkg, name) |
|
95 | cls = getattr(pkg, name) | |
96 | if isinstance(cls, type) and issubclass(cls, Configurable): |
|
96 | if isinstance(cls, type) and issubclass(cls, Configurable): | |
97 | classes.append(cls) |
|
97 | classes.append(cls) | |
98 |
|
98 | |||
99 | return classes |
|
99 | return classes | |
100 |
|
100 | |||
101 | description = Unicode( |
|
101 | description = Unicode( | |
102 | u"""This application is used to convert notebook files (*.ipynb) |
|
102 | u"""This application is used to convert notebook files (*.ipynb) | |
103 | to various other formats. |
|
103 | to various other formats. | |
104 |
|
104 | |||
105 | WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") |
|
105 | WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") | |
106 |
|
106 | |||
107 | output_base = Unicode('', config=True, help='''overwrite base name used for output files. |
|
107 | output_base = Unicode('', config=True, help='''overwrite base name used for output files. | |
108 | can only be used when converting one notebook at a time. |
|
108 | can only be used when converting one notebook at a time. | |
109 | ''') |
|
109 | ''') | |
110 |
|
110 | |||
111 | use_output_suffix = Bool( |
|
111 | use_output_suffix = Bool( | |
112 | True, |
|
112 | True, | |
113 | config=True, |
|
113 | config=True, | |
114 | help="""Whether to apply a suffix prior to the extension (only relevant |
|
114 | help="""Whether to apply a suffix prior to the extension (only relevant | |
115 | when converting to notebook format). The suffix is determined by |
|
115 | when converting to notebook format). The suffix is determined by | |
116 | the exporter, and is usually '.nbconvert'.""") |
|
116 | the exporter, and is usually '.nbconvert'.""") | |
117 |
|
117 | |||
118 | examples = Unicode(u""" |
|
118 | examples = Unicode(u""" | |
119 | The simplest way to use nbconvert is |
|
119 | The simplest way to use nbconvert is | |
120 |
|
120 | |||
121 | > ipython nbconvert mynotebook.ipynb |
|
121 | > ipython nbconvert mynotebook.ipynb | |
122 |
|
122 | |||
123 | which will convert mynotebook.ipynb to the default format (probably HTML). |
|
123 | which will convert mynotebook.ipynb to the default format (probably HTML). | |
124 |
|
124 | |||
125 | You can specify the export format with `--to`. |
|
125 | You can specify the export format with `--to`. | |
126 | Options include {0} |
|
126 | Options include {0} | |
127 |
|
127 | |||
128 | > ipython nbconvert --to latex mynotebook.ipynb |
|
128 | > ipython nbconvert --to latex mynotebook.ipynb | |
129 |
|
129 | |||
130 | Both HTML and LaTeX support multiple output templates. LaTeX includes |
|
130 | Both HTML and LaTeX support multiple output templates. LaTeX includes | |
131 | 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You |
|
131 | 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You | |
132 | can specify the flavor of the format used. |
|
132 | can specify the flavor of the format used. | |
133 |
|
133 | |||
134 | > ipython nbconvert --to html --template basic mynotebook.ipynb |
|
134 | > ipython nbconvert --to html --template basic mynotebook.ipynb | |
135 |
|
135 | |||
136 | You can also pipe the output to stdout, rather than a file |
|
136 | You can also pipe the output to stdout, rather than a file | |
137 |
|
137 | |||
138 | > ipython nbconvert mynotebook.ipynb --stdout |
|
138 | > ipython nbconvert mynotebook.ipynb --stdout | |
139 |
|
139 | |||
140 | PDF is generated via latex |
|
140 | PDF is generated via latex | |
141 |
|
141 | |||
142 | > ipython nbconvert mynotebook.ipynb --to pdf |
|
142 | > ipython nbconvert mynotebook.ipynb --to pdf | |
143 |
|
143 | |||
144 | You can get (and serve) a Reveal.js-powered slideshow |
|
144 | You can get (and serve) a Reveal.js-powered slideshow | |
145 |
|
145 | |||
146 | > ipython nbconvert myslides.ipynb --to slides --post serve |
|
146 | > ipython nbconvert myslides.ipynb --to slides --post serve | |
147 |
|
147 | |||
148 | Multiple notebooks can be given at the command line in a couple of |
|
148 | Multiple notebooks can be given at the command line in a couple of | |
149 | different ways: |
|
149 | different ways: | |
150 |
|
150 | |||
151 | > ipython nbconvert notebook*.ipynb |
|
151 | > ipython nbconvert notebook*.ipynb | |
152 | > ipython nbconvert notebook1.ipynb notebook2.ipynb |
|
152 | > ipython nbconvert notebook1.ipynb notebook2.ipynb | |
153 |
|
153 | |||
154 | or you can specify the notebooks list in a config file, containing:: |
|
154 | or you can specify the notebooks list in a config file, containing:: | |
155 |
|
155 | |||
156 | c.NbConvertApp.notebooks = ["my_notebook.ipynb"] |
|
156 | c.NbConvertApp.notebooks = ["my_notebook.ipynb"] | |
157 |
|
157 | |||
158 | > ipython nbconvert --config mycfg.py |
|
158 | > ipython nbconvert --config mycfg.py | |
159 | """.format(get_export_names())) |
|
159 | """.format(get_export_names())) | |
160 |
|
160 | |||
161 | # Writer specific variables |
|
161 | # Writer specific variables | |
162 | writer = Instance('IPython.nbconvert.writers.base.WriterBase', |
|
162 | writer = Instance('jupyter_nbconvert.writers.base.WriterBase', | |
163 | help="""Instance of the writer class used to write the |
|
163 | help="""Instance of the writer class used to write the | |
164 | results of the conversion.""") |
|
164 | results of the conversion.""") | |
165 | writer_class = DottedObjectName('FilesWriter', config=True, |
|
165 | writer_class = DottedObjectName('FilesWriter', config=True, | |
166 | help="""Writer class used to write the |
|
166 | help="""Writer class used to write the | |
167 | results of the conversion""") |
|
167 | results of the conversion""") | |
168 | writer_aliases = {'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter', |
|
168 | writer_aliases = {'fileswriter': 'jupyter_nbconvert.writers.files.FilesWriter', | |
169 | 'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter', |
|
169 | 'debugwriter': 'jupyter_nbconvert.writers.debug.DebugWriter', | |
170 | 'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'} |
|
170 | 'stdoutwriter': 'jupyter_nbconvert.writers.stdout.StdoutWriter'} | |
171 | writer_factory = Type() |
|
171 | writer_factory = Type() | |
172 |
|
172 | |||
173 | def _writer_class_changed(self, name, old, new): |
|
173 | def _writer_class_changed(self, name, old, new): | |
174 | if new.lower() in self.writer_aliases: |
|
174 | if new.lower() in self.writer_aliases: | |
175 | new = self.writer_aliases[new.lower()] |
|
175 | new = self.writer_aliases[new.lower()] | |
176 | self.writer_factory = import_item(new) |
|
176 | self.writer_factory = import_item(new) | |
177 |
|
177 | |||
178 | # Post-processor specific variables |
|
178 | # Post-processor specific variables | |
179 | postprocessor = Instance('IPython.nbconvert.postprocessors.base.PostProcessorBase', |
|
179 | postprocessor = Instance('jupyter_nbconvert.postprocessors.base.PostProcessorBase', | |
180 | help="""Instance of the PostProcessor class used to write the |
|
180 | help="""Instance of the PostProcessor class used to write the | |
181 | results of the conversion.""") |
|
181 | results of the conversion.""") | |
182 |
|
182 | |||
183 | postprocessor_class = DottedOrNone(config=True, |
|
183 | postprocessor_class = DottedOrNone(config=True, | |
184 | help="""PostProcessor class used to write the |
|
184 | help="""PostProcessor class used to write the | |
185 | results of the conversion""") |
|
185 | results of the conversion""") | |
186 | postprocessor_aliases = {'serve': 'IPython.nbconvert.postprocessors.serve.ServePostProcessor'} |
|
186 | postprocessor_aliases = {'serve': 'jupyter_nbconvert.postprocessors.serve.ServePostProcessor'} | |
187 | postprocessor_factory = Type() |
|
187 | postprocessor_factory = Type() | |
188 |
|
188 | |||
189 | def _postprocessor_class_changed(self, name, old, new): |
|
189 | def _postprocessor_class_changed(self, name, old, new): | |
190 | if new.lower() in self.postprocessor_aliases: |
|
190 | if new.lower() in self.postprocessor_aliases: | |
191 | new = self.postprocessor_aliases[new.lower()] |
|
191 | new = self.postprocessor_aliases[new.lower()] | |
192 | if new: |
|
192 | if new: | |
193 | self.postprocessor_factory = import_item(new) |
|
193 | self.postprocessor_factory = import_item(new) | |
194 |
|
194 | |||
195 |
|
195 | |||
196 | # Other configurable variables |
|
196 | # Other configurable variables | |
197 | export_format = CaselessStrEnum(get_export_names(), |
|
197 | export_format = CaselessStrEnum(get_export_names(), | |
198 | default_value="html", |
|
198 | default_value="html", | |
199 | config=True, |
|
199 | config=True, | |
200 | help="""The export format to be used.""" |
|
200 | help="""The export format to be used.""" | |
201 | ) |
|
201 | ) | |
202 |
|
202 | |||
203 | notebooks = List([], config=True, help="""List of notebooks to convert. |
|
203 | notebooks = List([], config=True, help="""List of notebooks to convert. | |
204 | Wildcards are supported. |
|
204 | Wildcards are supported. | |
205 | Filenames passed positionally will be added to the list. |
|
205 | Filenames passed positionally will be added to the list. | |
206 | """) |
|
206 | """) | |
207 |
|
207 | |||
208 | @catch_config_error |
|
208 | @catch_config_error | |
209 | def initialize(self, argv=None): |
|
209 | def initialize(self, argv=None): | |
210 | self.init_syspath() |
|
210 | self.init_syspath() | |
211 | super(NbConvertApp, self).initialize(argv) |
|
211 | super(NbConvertApp, self).initialize(argv) | |
212 | self.init_notebooks() |
|
212 | self.init_notebooks() | |
213 | self.init_writer() |
|
213 | self.init_writer() | |
214 | self.init_postprocessor() |
|
214 | self.init_postprocessor() | |
215 |
|
215 | |||
216 |
|
216 | |||
217 |
|
217 | |||
218 | def init_syspath(self): |
|
218 | def init_syspath(self): | |
219 | """ |
|
219 | """ | |
220 | Add the cwd to the sys.path ($PYTHONPATH) |
|
220 | Add the cwd to the sys.path ($PYTHONPATH) | |
221 | """ |
|
221 | """ | |
222 | sys.path.insert(0, os.getcwd()) |
|
222 | sys.path.insert(0, os.getcwd()) | |
223 |
|
223 | |||
224 |
|
224 | |||
225 | def init_notebooks(self): |
|
225 | def init_notebooks(self): | |
226 | """Construct the list of notebooks. |
|
226 | """Construct the list of notebooks. | |
227 | If notebooks are passed on the command-line, |
|
227 | If notebooks are passed on the command-line, | |
228 | they override notebooks specified in config files. |
|
228 | they override notebooks specified in config files. | |
229 | Glob each notebook to replace notebook patterns with filenames. |
|
229 | Glob each notebook to replace notebook patterns with filenames. | |
230 | """ |
|
230 | """ | |
231 |
|
231 | |||
232 | # Specifying notebooks on the command-line overrides (rather than adds) |
|
232 | # Specifying notebooks on the command-line overrides (rather than adds) | |
233 | # the notebook list |
|
233 | # the notebook list | |
234 | if self.extra_args: |
|
234 | if self.extra_args: | |
235 | patterns = self.extra_args |
|
235 | patterns = self.extra_args | |
236 | else: |
|
236 | else: | |
237 | patterns = self.notebooks |
|
237 | patterns = self.notebooks | |
238 |
|
238 | |||
239 | # Use glob to replace all the notebook patterns with filenames. |
|
239 | # Use glob to replace all the notebook patterns with filenames. | |
240 | filenames = [] |
|
240 | filenames = [] | |
241 | for pattern in patterns: |
|
241 | for pattern in patterns: | |
242 |
|
242 | |||
243 | # Use glob to find matching filenames. Allow the user to convert |
|
243 | # Use glob to find matching filenames. Allow the user to convert | |
244 | # notebooks without having to type the extension. |
|
244 | # notebooks without having to type the extension. | |
245 | globbed_files = glob.glob(pattern) |
|
245 | globbed_files = glob.glob(pattern) | |
246 | globbed_files.extend(glob.glob(pattern + '.ipynb')) |
|
246 | globbed_files.extend(glob.glob(pattern + '.ipynb')) | |
247 | if not globbed_files: |
|
247 | if not globbed_files: | |
248 | self.log.warn("pattern %r matched no files", pattern) |
|
248 | self.log.warn("pattern %r matched no files", pattern) | |
249 |
|
249 | |||
250 | for filename in globbed_files: |
|
250 | for filename in globbed_files: | |
251 | if not filename in filenames: |
|
251 | if not filename in filenames: | |
252 | filenames.append(filename) |
|
252 | filenames.append(filename) | |
253 | self.notebooks = filenames |
|
253 | self.notebooks = filenames | |
254 |
|
254 | |||
255 | def init_writer(self): |
|
255 | def init_writer(self): | |
256 | """ |
|
256 | """ | |
257 | Initialize the writer (which is stateless) |
|
257 | Initialize the writer (which is stateless) | |
258 | """ |
|
258 | """ | |
259 | self._writer_class_changed(None, self.writer_class, self.writer_class) |
|
259 | self._writer_class_changed(None, self.writer_class, self.writer_class) | |
260 | self.writer = self.writer_factory(parent=self) |
|
260 | self.writer = self.writer_factory(parent=self) | |
261 | if hasattr(self.writer, 'build_directory') and self.writer.build_directory != '': |
|
261 | if hasattr(self.writer, 'build_directory') and self.writer.build_directory != '': | |
262 | self.use_output_suffix = False |
|
262 | self.use_output_suffix = False | |
263 |
|
263 | |||
264 | def init_postprocessor(self): |
|
264 | def init_postprocessor(self): | |
265 | """ |
|
265 | """ | |
266 | Initialize the postprocessor (which is stateless) |
|
266 | Initialize the postprocessor (which is stateless) | |
267 | """ |
|
267 | """ | |
268 | self._postprocessor_class_changed(None, self.postprocessor_class, |
|
268 | self._postprocessor_class_changed(None, self.postprocessor_class, | |
269 | self.postprocessor_class) |
|
269 | self.postprocessor_class) | |
270 | if self.postprocessor_factory: |
|
270 | if self.postprocessor_factory: | |
271 | self.postprocessor = self.postprocessor_factory(parent=self) |
|
271 | self.postprocessor = self.postprocessor_factory(parent=self) | |
272 |
|
272 | |||
273 | def start(self): |
|
273 | def start(self): | |
274 | """ |
|
274 | """ | |
275 | Run after initialization is completed |
|
275 | Run after initialization is completed | |
276 | """ |
|
276 | """ | |
277 | super(NbConvertApp, self).start() |
|
277 | super(NbConvertApp, self).start() | |
278 | self.convert_notebooks() |
|
278 | self.convert_notebooks() | |
279 |
|
279 | |||
280 | def init_single_notebook_resources(self, notebook_filename): |
|
280 | def init_single_notebook_resources(self, notebook_filename): | |
281 | """Step 1: Initialize resources |
|
281 | """Step 1: Initialize resources | |
282 |
|
282 | |||
283 | This initializes the resources dictionary for a single notebook. This |
|
283 | This initializes the resources dictionary for a single notebook. This | |
284 | method should return the resources dictionary, and MUST include the |
|
284 | method should return the resources dictionary, and MUST include the | |
285 | following keys: |
|
285 | following keys: | |
286 |
|
286 | |||
287 | - profile_dir: the location of the profile directory |
|
287 | - profile_dir: the location of the profile directory | |
288 | - unique_key: the notebook name |
|
288 | - unique_key: the notebook name | |
289 | - output_files_dir: a directory where output files (not including |
|
289 | - output_files_dir: a directory where output files (not including | |
290 | the notebook itself) should be saved |
|
290 | the notebook itself) should be saved | |
291 |
|
291 | |||
292 | """ |
|
292 | """ | |
293 |
|
293 | |||
294 | # Get a unique key for the notebook and set it in the resources object. |
|
294 | # Get a unique key for the notebook and set it in the resources object. | |
295 | basename = os.path.basename(notebook_filename) |
|
295 | basename = os.path.basename(notebook_filename) | |
296 | notebook_name = basename[:basename.rfind('.')] |
|
296 | notebook_name = basename[:basename.rfind('.')] | |
297 | if self.output_base: |
|
297 | if self.output_base: | |
298 | # strip duplicate extension from output_base, to avoid basename.ext.ext |
|
298 | # strip duplicate extension from output_base, to avoid basename.ext.ext | |
299 | if getattr(self.exporter, 'file_extension', False): |
|
299 | if getattr(self.exporter, 'file_extension', False): | |
300 | base, ext = os.path.splitext(self.output_base) |
|
300 | base, ext = os.path.splitext(self.output_base) | |
301 | if ext == self.exporter.file_extension: |
|
301 | if ext == self.exporter.file_extension: | |
302 | self.output_base = base |
|
302 | self.output_base = base | |
303 | notebook_name = self.output_base |
|
303 | notebook_name = self.output_base | |
304 |
|
304 | |||
305 | self.log.debug("Notebook name is '%s'", notebook_name) |
|
305 | self.log.debug("Notebook name is '%s'", notebook_name) | |
306 |
|
306 | |||
307 | # first initialize the resources we want to use |
|
307 | # first initialize the resources we want to use | |
308 | resources = {} |
|
308 | resources = {} | |
309 | resources['profile_dir'] = self.profile_dir.location |
|
309 | resources['profile_dir'] = self.profile_dir.location | |
310 | resources['unique_key'] = notebook_name |
|
310 | resources['unique_key'] = notebook_name | |
311 | resources['output_files_dir'] = '%s_files' % notebook_name |
|
311 | resources['output_files_dir'] = '%s_files' % notebook_name | |
312 |
|
312 | |||
313 | return resources |
|
313 | return resources | |
314 |
|
314 | |||
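For reference, the dictionary returned above for a hypothetical analysis.ipynb would look roughly like this (the profile path differs per machine):

resources = {
    'profile_dir': '/home/user/.ipython/profile_default',  # hypothetical path
    'unique_key': 'analysis',              # notebook name without extension
    'output_files_dir': 'analysis_files',  # where extracted outputs are written
}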
315 | def export_single_notebook(self, notebook_filename, resources): |
|
315 | def export_single_notebook(self, notebook_filename, resources): | |
316 | """Step 2: Export the notebook |
|
316 | """Step 2: Export the notebook | |
317 |
|
317 | |||
318 | Exports the notebook to a particular format according to the specified |
|
318 | Exports the notebook to a particular format according to the specified | |
319 | exporter. This function returns the output and (possibly modified) |
|
319 | exporter. This function returns the output and (possibly modified) | |
320 | resources from the exporter. |
|
320 | resources from the exporter. | |
321 |
|
321 | |||
322 | """ |
|
322 | """ | |
323 | try: |
|
323 | try: | |
324 | output, resources = self.exporter.from_filename(notebook_filename, resources=resources) |
|
324 | output, resources = self.exporter.from_filename(notebook_filename, resources=resources) | |
325 | except ConversionException: |
|
325 | except ConversionException: | |
326 | self.log.error("Error while converting '%s'", notebook_filename, exc_info=True) |
|
326 | self.log.error("Error while converting '%s'", notebook_filename, exc_info=True) | |
327 | self.exit(1) |
|
327 | self.exit(1) | |
328 |
|
328 | |||
329 | return output, resources |
|
329 | return output, resources | |
330 |
|
330 | |||
331 | def write_single_notebook(self, output, resources): |
|
331 | def write_single_notebook(self, output, resources): | |
332 | """Step 3: Write the notebook to file |
|
332 | """Step 3: Write the notebook to file | |
333 |
|
333 | |||
334 | This writes output from the exporter to file using the specified writer. |
|
334 | This writes output from the exporter to file using the specified writer. | |
335 | It returns the results from the writer. |
|
335 | It returns the results from the writer. | |
336 |
|
336 | |||
337 | """ |
|
337 | """ | |
338 | if 'unique_key' not in resources: |
|
338 | if 'unique_key' not in resources: | |
339 | raise KeyError("unique_key MUST be specified in the resources, but it is not") |
|
339 | raise KeyError("unique_key MUST be specified in the resources, but it is not") | |
340 |
|
340 | |||
341 | notebook_name = resources['unique_key'] |
|
341 | notebook_name = resources['unique_key'] | |
342 | if self.use_output_suffix and not self.output_base: |
|
342 | if self.use_output_suffix and not self.output_base: | |
343 | notebook_name += resources.get('output_suffix', '') |
|
343 | notebook_name += resources.get('output_suffix', '') | |
344 |
|
344 | |||
345 | write_results = self.writer.write( |
|
345 | write_results = self.writer.write( | |
346 | output, resources, notebook_name=notebook_name) |
|
346 | output, resources, notebook_name=notebook_name) | |
347 | return write_results |
|
347 | return write_results | |
348 |
|
348 | |||
349 | def postprocess_single_notebook(self, write_results): |
|
349 | def postprocess_single_notebook(self, write_results): | |
350 | """Step 4: Postprocess the notebook |
|
350 | """Step 4: Postprocess the notebook | |
351 |
|
351 | |||
352 | This postprocesses the notebook after it has been written, taking as an |
|
352 | This postprocesses the notebook after it has been written, taking as an | |
353 | argument the results of writing the notebook to file. This only |
|
353 | argument the results of writing the notebook to file. This only | |
354 | does anything if a postprocessor has actually been specified. |
|
354 | does anything if a postprocessor has actually been specified. | |
355 |
|
355 | |||
356 | """ |
|
356 | """ | |
357 | # Post-process if post processor has been defined. |
|
357 | # Post-process if post processor has been defined. | |
358 | if hasattr(self, 'postprocessor') and self.postprocessor: |
|
358 | if hasattr(self, 'postprocessor') and self.postprocessor: | |
359 | self.postprocessor(write_results) |
|
359 | self.postprocessor(write_results) | |
360 |
|
360 | |||
361 | def convert_single_notebook(self, notebook_filename): |
|
361 | def convert_single_notebook(self, notebook_filename): | |
362 | """Convert a single notebook. Performs the following steps: |
|
362 | """Convert a single notebook. Performs the following steps: | |
363 |
|
363 | |||
364 | 1. Initialize notebook resources |
|
364 | 1. Initialize notebook resources | |
365 | 2. Export the notebook to a particular format |
|
365 | 2. Export the notebook to a particular format | |
366 | 3. Write the exported notebook to file |
|
366 | 3. Write the exported notebook to file | |
367 | 4. (Maybe) postprocess the written file |
|
367 | 4. (Maybe) postprocess the written file | |
368 |
|
368 | |||
369 | """ |
|
369 | """ | |
370 | self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) |
|
370 | self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) | |
371 | resources = self.init_single_notebook_resources(notebook_filename) |
|
371 | resources = self.init_single_notebook_resources(notebook_filename) | |
372 | output, resources = self.export_single_notebook(notebook_filename, resources) |
|
372 | output, resources = self.export_single_notebook(notebook_filename, resources) | |
373 | write_results = self.write_single_notebook(output, resources) |
|
373 | write_results = self.write_single_notebook(output, resources) | |
374 | self.postprocess_single_notebook(write_results) |
|
374 | self.postprocess_single_notebook(write_results) | |
375 |
|
375 | |||
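The same four steps, written out by hand for a single hypothetical notebook, where app is an NbConvertApp that has already been initialized and had its exporter set up (as convert_notebooks does before calling this method):

nb = 'analysis.ipynb'                                           # hypothetical file
resources = app.init_single_notebook_resources(nb)             # 1. resources
output, resources = app.export_single_notebook(nb, resources)  # 2. export
write_results = app.write_single_notebook(output, resources)   # 3. write
app.postprocess_single_notebook(write_results)                 # 4. postprocess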
376 | def convert_notebooks(self): |
|
376 | def convert_notebooks(self): | |
377 | """ |
|
377 | """ | |
378 | Convert the notebooks in the self.notebook traitlet |
|
378 | Convert the notebooks in the self.notebook traitlet | |
379 | """ |
|
379 | """ | |
380 | # check that the output base isn't specified if there is more than |
|
380 | # check that the output base isn't specified if there is more than | |
381 | # one notebook to convert |
|
381 | # one notebook to convert | |
382 | if self.output_base != '' and len(self.notebooks) > 1: |
|
382 | if self.output_base != '' and len(self.notebooks) > 1: | |
383 | self.log.error( |
|
383 | self.log.error( | |
384 | """ |
|
384 | """ | |
385 | UsageError: --output flag or `NbConvertApp.output_base` config option |
|
385 | UsageError: --output flag or `NbConvertApp.output_base` config option | |
386 | cannot be used when converting multiple notebooks. |
|
386 | cannot be used when converting multiple notebooks. | |
387 | """ |
|
387 | """ | |
388 | ) |
|
388 | ) | |
389 | self.exit(1) |
|
389 | self.exit(1) | |
390 |
|
390 | |||
391 | # initialize the exporter |
|
391 | # initialize the exporter | |
392 | self.exporter = exporter_map[self.export_format](config=self.config) |
|
392 | self.exporter = exporter_map[self.export_format](config=self.config) | |
393 |
|
393 | |||
394 | # no notebooks to convert! |
|
394 | # no notebooks to convert! | |
395 | if len(self.notebooks) == 0: |
|
395 | if len(self.notebooks) == 0: | |
396 | self.print_help() |
|
396 | self.print_help() | |
397 | sys.exit(-1) |
|
397 | sys.exit(-1) | |
398 |
|
398 | |||
399 | # convert each notebook |
|
399 | # convert each notebook | |
400 | for notebook_filename in self.notebooks: |
|
400 | for notebook_filename in self.notebooks: | |
401 | self.convert_single_notebook(notebook_filename) |
|
401 | self.convert_single_notebook(notebook_filename) | |
402 |
|
402 | |||
403 | #----------------------------------------------------------------------------- |
|
403 | #----------------------------------------------------------------------------- | |
404 | # Main entry point |
|
404 | # Main entry point | |
405 | #----------------------------------------------------------------------------- |
|
405 | #----------------------------------------------------------------------------- | |
406 |
|
406 | |||
407 | launch_new_instance = NbConvertApp.launch_instance |
|
407 | launch_new_instance = NbConvertApp.launch_instance |
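launch_new_instance is what a console entry point calls; a minimal launcher script is simply:

if __name__ == '__main__':
    launch_new_instance()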
@@ -1,151 +1,151 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Module with tests for the execute preprocessor. |
|
2 | Module with tests for the execute preprocessor. | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | # Copyright (c) IPython Development Team. |
|
5 | # Copyright (c) IPython Development Team. | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 |
|
7 | |||
8 | import copy |
|
8 | import copy | |
9 | import glob |
|
9 | import glob | |
10 | import io |
|
10 | import io | |
11 | import os |
|
11 | import os | |
12 | import re |
|
12 | import re | |
13 |
|
13 | |||
14 | try: |
|
14 | try: | |
15 | from queue import Empty # Py 3 |
|
15 | from queue import Empty # Py 3 | |
16 | except ImportError: |
|
16 | except ImportError: | |
17 | from Queue import Empty # Py 2 |
|
17 | from Queue import Empty # Py 2 | |
18 |
|
18 | |||
19 | from IPython import nbformat |
|
19 | from IPython import nbformat | |
20 |
|
20 | |||
21 | from .base import PreprocessorTestsBase |
|
21 | from .base import PreprocessorTestsBase | |
22 | from ..execute import ExecutePreprocessor |
|
22 | from ..execute import ExecutePreprocessor | |
23 |
|
23 | |||
24 | from IPython.nbconvert.filters import strip_ansi |
|
24 | from jupyter_nbconvert.filters import strip_ansi | |
25 | from nose.tools import assert_raises |
|
25 | from nose.tools import assert_raises | |
26 |
|
26 | |||
27 | addr_pat = re.compile(r'0x[0-9a-f]{7,9}') |
|
27 | addr_pat = re.compile(r'0x[0-9a-f]{7,9}') | |
28 |
|
28 | |||
29 | class TestExecute(PreprocessorTestsBase): |
|
29 | class TestExecute(PreprocessorTestsBase): | |
30 | """Contains test functions for execute.py""" |
|
30 | """Contains test functions for execute.py""" | |
31 |
|
31 | |||
32 | @staticmethod |
|
32 | @staticmethod | |
33 | def normalize_output(output): |
|
33 | def normalize_output(output): | |
34 | """ |
|
34 | """ | |
35 | Normalizes outputs for comparison. |
|
35 | Normalizes outputs for comparison. | |
36 | """ |
|
36 | """ | |
37 | output = dict(output) |
|
37 | output = dict(output) | |
38 | if 'metadata' in output: |
|
38 | if 'metadata' in output: | |
39 | del output['metadata'] |
|
39 | del output['metadata'] | |
40 | if 'text' in output: |
|
40 | if 'text' in output: | |
41 | output['text'] = re.sub(addr_pat, '<HEXADDR>', output['text']) |
|
41 | output['text'] = re.sub(addr_pat, '<HEXADDR>', output['text']) | |
42 | if 'text/plain' in output.get('data', {}): |
|
42 | if 'text/plain' in output.get('data', {}): | |
43 | output['data']['text/plain'] = \ |
|
43 | output['data']['text/plain'] = \ | |
44 | re.sub(addr_pat, '<HEXADDR>', output['data']['text/plain']) |
|
44 | re.sub(addr_pat, '<HEXADDR>', output['data']['text/plain']) | |
45 | if 'traceback' in output: |
|
45 | if 'traceback' in output: | |
46 | tb = [] |
|
46 | tb = [] | |
47 | for line in output['traceback']: |
|
47 | for line in output['traceback']: | |
48 | tb.append(strip_ansi(line)) |
|
48 | tb.append(strip_ansi(line)) | |
49 | output['traceback'] = tb |
|
49 | output['traceback'] = tb | |
50 |
|
50 | |||
51 | return output |
|
51 | return output | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | def assert_notebooks_equal(self, expected, actual): |
|
54 | def assert_notebooks_equal(self, expected, actual): | |
55 | expected_cells = expected['cells'] |
|
55 | expected_cells = expected['cells'] | |
56 | actual_cells = actual['cells'] |
|
56 | actual_cells = actual['cells'] | |
57 | self.assertEqual(len(expected_cells), len(actual_cells)) |
|
57 | self.assertEqual(len(expected_cells), len(actual_cells)) | |
58 |
|
58 | |||
59 | for expected_cell, actual_cell in zip(expected_cells, actual_cells): |
|
59 | for expected_cell, actual_cell in zip(expected_cells, actual_cells): | |
60 | expected_outputs = expected_cell.get('outputs', []) |
|
60 | expected_outputs = expected_cell.get('outputs', []) | |
61 | actual_outputs = actual_cell.get('outputs', []) |
|
61 | actual_outputs = actual_cell.get('outputs', []) | |
62 | normalized_expected_outputs = list(map(self.normalize_output, expected_outputs)) |
|
62 | normalized_expected_outputs = list(map(self.normalize_output, expected_outputs)) | |
63 | normalized_actual_outputs = list(map(self.normalize_output, actual_outputs)) |
|
63 | normalized_actual_outputs = list(map(self.normalize_output, actual_outputs)) | |
64 | self.assertEqual(normalized_expected_outputs, normalized_actual_outputs) |
|
64 | self.assertEqual(normalized_expected_outputs, normalized_actual_outputs) | |
65 |
|
65 | |||
66 | expected_execution_count = expected_cell.get('execution_count', None) |
|
66 | expected_execution_count = expected_cell.get('execution_count', None) | |
67 | actual_execution_count = actual_cell.get('execution_count', None) |
|
67 | actual_execution_count = actual_cell.get('execution_count', None) | |
68 | self.assertEqual(expected_execution_count, actual_execution_count) |
|
68 | self.assertEqual(expected_execution_count, actual_execution_count) | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def build_preprocessor(self, opts): |
|
71 | def build_preprocessor(self, opts): | |
72 | """Make an instance of a preprocessor""" |
|
72 | """Make an instance of a preprocessor""" | |
73 | preprocessor = ExecutePreprocessor() |
|
73 | preprocessor = ExecutePreprocessor() | |
74 | preprocessor.enabled = True |
|
74 | preprocessor.enabled = True | |
75 | for opt in opts: |
|
75 | for opt in opts: | |
76 | setattr(preprocessor, opt, opts[opt]) |
|
76 | setattr(preprocessor, opt, opts[opt]) | |
77 | return preprocessor |
|
77 | return preprocessor | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | def test_constructor(self): |
|
80 | def test_constructor(self): | |
81 | """Can a ExecutePreprocessor be constructed?""" |
|
81 | """Can a ExecutePreprocessor be constructed?""" | |
82 | self.build_preprocessor({}) |
|
82 | self.build_preprocessor({}) | |
83 |
|
83 | |||
84 |
|
84 | |||
85 | def run_notebook(self, filename, opts, resources): |
|
85 | def run_notebook(self, filename, opts, resources): | |
86 | """Loads and runs a notebook, returning both the version prior to |
|
86 | """Loads and runs a notebook, returning both the version prior to | |
87 | running it and the version after running it. |
|
87 | running it and the version after running it. | |
88 |
|
88 | |||
89 | """ |
|
89 | """ | |
90 | with io.open(filename) as f: |
|
90 | with io.open(filename) as f: | |
91 | input_nb = nbformat.read(f, 4) |
|
91 | input_nb = nbformat.read(f, 4) | |
92 | preprocessor = self.build_preprocessor(opts) |
|
92 | preprocessor = self.build_preprocessor(opts) | |
93 | cleaned_input_nb = copy.deepcopy(input_nb) |
|
93 | cleaned_input_nb = copy.deepcopy(input_nb) | |
94 | for cell in cleaned_input_nb.cells: |
|
94 | for cell in cleaned_input_nb.cells: | |
95 | if 'execution_count' in cell: |
|
95 | if 'execution_count' in cell: | |
96 | del cell['execution_count'] |
|
96 | del cell['execution_count'] | |
97 | cell['outputs'] = [] |
|
97 | cell['outputs'] = [] | |
98 | output_nb, _ = preprocessor(cleaned_input_nb, resources) |
|
98 | output_nb, _ = preprocessor(cleaned_input_nb, resources) | |
99 | return input_nb, output_nb |
|
99 | return input_nb, output_nb | |
100 |
|
100 | |||
101 | def test_run_notebooks(self): |
|
101 | def test_run_notebooks(self): | |
102 | """Runs a series of test notebooks and compares them to their actual output""" |
|
102 | """Runs a series of test notebooks and compares them to their actual output""" | |
103 | current_dir = os.path.dirname(__file__) |
|
103 | current_dir = os.path.dirname(__file__) | |
104 | input_files = glob.glob(os.path.join(current_dir, 'files', '*.ipynb')) |
|
104 | input_files = glob.glob(os.path.join(current_dir, 'files', '*.ipynb')) | |
105 | for filename in input_files: |
|
105 | for filename in input_files: | |
106 | if os.path.basename(filename) == "Disable Stdin.ipynb": |
|
106 | if os.path.basename(filename) == "Disable Stdin.ipynb": | |
107 | continue |
|
107 | continue | |
108 | elif os.path.basename(filename) == "Interrupt.ipynb": |
|
108 | elif os.path.basename(filename) == "Interrupt.ipynb": | |
109 | opts = dict(timeout=1, interrupt_on_timeout=True) |
|
109 | opts = dict(timeout=1, interrupt_on_timeout=True) | |
110 | else: |
|
110 | else: | |
111 | opts = {} |
|
111 | opts = {} | |
112 | res = self.build_resources() |
|
112 | res = self.build_resources() | |
113 | res['metadata']['path'] = os.path.dirname(filename) |
|
113 | res['metadata']['path'] = os.path.dirname(filename) | |
114 | input_nb, output_nb = self.run_notebook(filename, opts, res) |
|
114 | input_nb, output_nb = self.run_notebook(filename, opts, res) | |
115 | self.assert_notebooks_equal(input_nb, output_nb) |
|
115 | self.assert_notebooks_equal(input_nb, output_nb) | |
116 |
|
116 | |||
117 | def test_empty_path(self): |
|
117 | def test_empty_path(self): | |
118 | """Can the kernel be started when the path is empty?""" |
|
118 | """Can the kernel be started when the path is empty?""" | |
119 | current_dir = os.path.dirname(__file__) |
|
119 | current_dir = os.path.dirname(__file__) | |
120 | filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') |
|
120 | filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') | |
121 | res = self.build_resources() |
|
121 | res = self.build_resources() | |
122 | res['metadata']['path'] = '' |
|
122 | res['metadata']['path'] = '' | |
123 | input_nb, output_nb = self.run_notebook(filename, {}, res) |
|
123 | input_nb, output_nb = self.run_notebook(filename, {}, res) | |
124 | self.assert_notebooks_equal(input_nb, output_nb) |
|
124 | self.assert_notebooks_equal(input_nb, output_nb) | |
125 |
|
125 | |||
126 | def test_disable_stdin(self): |
|
126 | def test_disable_stdin(self): | |
127 | """Test disabling standard input""" |
|
127 | """Test disabling standard input""" | |
128 | current_dir = os.path.dirname(__file__) |
|
128 | current_dir = os.path.dirname(__file__) | |
129 | filename = os.path.join(current_dir, 'files', 'Disable Stdin.ipynb') |
|
129 | filename = os.path.join(current_dir, 'files', 'Disable Stdin.ipynb') | |
130 | res = self.build_resources() |
|
130 | res = self.build_resources() | |
131 | res['metadata']['path'] = os.path.dirname(filename) |
|
131 | res['metadata']['path'] = os.path.dirname(filename) | |
132 | input_nb, output_nb = self.run_notebook(filename, {}, res) |
|
132 | input_nb, output_nb = self.run_notebook(filename, {}, res) | |
133 |
|
133 | |||
134 | # We need to special-case this particular notebook, because the |
|
134 | # We need to special-case this particular notebook, because the | |
135 | # traceback contains machine-specific stuff like where IPython |
|
135 | # traceback contains machine-specific stuff like where IPython | |
136 | # is installed. It is sufficient here to just check that an error |
|
136 | # is installed. It is sufficient here to just check that an error | |
137 | # was thrown, and that it was a StdinNotImplementedError |
|
137 | # was thrown, and that it was a StdinNotImplementedError | |
138 | self.assertEqual(len(output_nb['cells']), 1) |
|
138 | self.assertEqual(len(output_nb['cells']), 1) | |
139 | self.assertEqual(len(output_nb['cells'][0]['outputs']), 1) |
|
139 | self.assertEqual(len(output_nb['cells'][0]['outputs']), 1) | |
140 | output = output_nb['cells'][0]['outputs'][0] |
|
140 | output = output_nb['cells'][0]['outputs'][0] | |
141 | self.assertEqual(output['output_type'], 'error') |
|
141 | self.assertEqual(output['output_type'], 'error') | |
142 | self.assertEqual(output['ename'], 'StdinNotImplementedError') |
|
142 | self.assertEqual(output['ename'], 'StdinNotImplementedError') | |
143 | self.assertEqual(output['evalue'], 'raw_input was called, but this frontend does not support input requests.') |
|
143 | self.assertEqual(output['evalue'], 'raw_input was called, but this frontend does not support input requests.') | |
144 |
|
144 | |||
145 | def test_timeout(self): |
|
145 | def test_timeout(self): | |
146 | """Check that an error is raised when a computation times out""" |
|
146 | """Check that an error is raised when a computation times out""" | |
147 | current_dir = os.path.dirname(__file__) |
|
147 | current_dir = os.path.dirname(__file__) | |
148 | filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb') |
|
148 | filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb') | |
149 | res = self.build_resources() |
|
149 | res = self.build_resources() | |
150 | res['metadata']['path'] = os.path.dirname(filename) |
|
150 | res['metadata']['path'] = os.path.dirname(filename) | |
151 | assert_raises(Empty, self.run_notebook, filename, dict(timeout=1), res) |
|
151 | assert_raises(Empty, self.run_notebook, filename, dict(timeout=1), res) |
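Outside the test harness, the pattern these tests exercise boils down to the following sketch; the notebook name is hypothetical and the import path assumes the jupyter_nbconvert layout used here:

import io
from IPython import nbformat
from jupyter_nbconvert.preprocessors.execute import ExecutePreprocessor

with io.open('analysis.ipynb') as f:
    nb = nbformat.read(f, 4)

ep = ExecutePreprocessor(timeout=30)   # 30 seconds per cell is an arbitrary choice
ep.enabled = True
nb, resources = ep(nb, {'metadata': {'path': '.'}})

with io.open('analysis.executed.ipynb', 'w', encoding='utf-8') as f:
    nbformat.write(nb, f)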
@@ -1,7 +1,7 b'' | |||||
1 | from IPython.nbconvert.writers.base import WriterBase |
|
1 | from jupyter_nbconvert.writers.base import WriterBase | |
2 |
|
2 | |||
3 | class HelloWriter(WriterBase): |
|
3 | class HelloWriter(WriterBase): | |
4 |
|
4 | |||
5 | def write(self, output, resources, notebook_name=None, **kw): |
|
5 | def write(self, output, resources, notebook_name=None, **kw): | |
6 | with open('hello.txt', 'w') as outfile: |
|
6 | with open('hello.txt', 'w') as outfile: | |
7 | outfile.write('hello world') |
|
7 | outfile.write('hello world') |
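A plug-in writer like this is selected through the --writer alias; assuming the file is saved as hello.py in the working directory (which nbconvert adds to sys.path), the invocation would look roughly like

> ipython nbconvert notebook1.ipynb --to python --writer hello.HelloWriter

and should leave a hello.txt containing 'hello world' in place of the usual output file.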
@@ -1,243 +1,243 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """Test NbConvertApp""" |
|
2 | """Test NbConvertApp""" | |
3 |
|
3 | |||
4 | # Copyright (c) IPython Development Team. |
|
4 | # Copyright (c) IPython Development Team. | |
5 | # Distributed under the terms of the Modified BSD License. |
|
5 | # Distributed under the terms of the Modified BSD License. | |
6 |
|
6 | |||
7 | import os |
|
7 | import os | |
8 | import glob |
|
8 | import glob | |
9 | import sys |
|
9 | import sys | |
10 |
|
10 | |||
11 | from .base import TestsBase |
|
11 | from .base import TestsBase | |
12 | from ..postprocessors import PostProcessorBase |
|
12 | from ..postprocessors import PostProcessorBase | |
13 |
|
13 | |||
14 | import IPython.testing.tools as tt |
|
14 | import IPython.testing.tools as tt | |
15 | from IPython.testing import decorators as dec |
|
15 | from IPython.testing import decorators as dec | |
16 |
|
16 | |||
17 | #----------------------------------------------------------------------------- |
|
17 | #----------------------------------------------------------------------------- | |
18 | # Classes and functions |
|
18 | # Classes and functions | |
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 |
|
20 | |||
21 | class DummyPost(PostProcessorBase): |
|
21 | class DummyPost(PostProcessorBase): | |
22 | def postprocess(self, filename): |
|
22 | def postprocess(self, filename): | |
23 | print("Dummy:%s" % filename) |
|
23 | print("Dummy:%s" % filename) | |
24 |
|
24 | |||
25 | class TestNbConvertApp(TestsBase): |
|
25 | class TestNbConvertApp(TestsBase): | |
26 | """Collection of NbConvertApp tests""" |
|
26 | """Collection of NbConvertApp tests""" | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | def test_notebook_help(self): |
|
29 | def test_notebook_help(self): | |
30 | """Will help show if no notebooks are specified?""" |
|
30 | """Will help show if no notebooks are specified?""" | |
31 | with self.create_temp_cwd(): |
|
31 | with self.create_temp_cwd(): | |
32 | out, err = self.call('nbconvert --log-level 0', ignore_return_code=True) |
|
32 | out, err = self.call('nbconvert --log-level 0', ignore_return_code=True) | |
33 | self.assertIn("see '--help-all'", out) |
|
33 | self.assertIn("see '--help-all'", out) | |
34 |
|
34 | |||
35 | def test_help_output(self): |
|
35 | def test_help_output(self): | |
36 | """ipython nbconvert --help-all works""" |
|
36 | """ipython nbconvert --help-all works""" | |
37 | tt.help_all_output_test('nbconvert') |
|
37 | tt.help_all_output_test('nbconvert') | |
38 |
|
38 | |||
39 | def test_glob(self): |
|
39 | def test_glob(self): | |
40 | """ |
|
40 | """ | |
41 | Do search patterns work for notebook names? |
|
41 | Do search patterns work for notebook names? | |
42 | """ |
|
42 | """ | |
43 | with self.create_temp_cwd(['notebook*.ipynb']): |
|
43 | with self.create_temp_cwd(['notebook*.ipynb']): | |
44 | self.call('nbconvert --to python *.ipynb --log-level 0') |
|
44 | self.call('nbconvert --to python *.ipynb --log-level 0') | |
45 | assert os.path.isfile('notebook1.py') |
|
45 | assert os.path.isfile('notebook1.py') | |
46 | assert os.path.isfile('notebook2.py') |
|
46 | assert os.path.isfile('notebook2.py') | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | def test_glob_subdir(self): |
|
49 | def test_glob_subdir(self): | |
50 | """ |
|
50 | """ | |
51 | Do search patterns work for subdirectory notebook names? |
|
51 | Do search patterns work for subdirectory notebook names? | |
52 | """ |
|
52 | """ | |
53 | with self.create_temp_cwd(): |
|
53 | with self.create_temp_cwd(): | |
54 | self.copy_files_to(['notebook*.ipynb'], 'subdir/') |
|
54 | self.copy_files_to(['notebook*.ipynb'], 'subdir/') | |
55 | self.call('nbconvert --to python --log-level 0 ' + |
|
55 | self.call('nbconvert --to python --log-level 0 ' + | |
56 | os.path.join('subdir', '*.ipynb')) |
|
56 | os.path.join('subdir', '*.ipynb')) | |
57 | assert os.path.isfile('notebook1.py') |
|
57 | assert os.path.isfile('notebook1.py') | |
58 | assert os.path.isfile('notebook2.py') |
|
58 | assert os.path.isfile('notebook2.py') | |
59 |
|
59 | |||
60 |
|
60 | |||
61 | def test_explicit(self): |
|
61 | def test_explicit(self): | |
62 | """ |
|
62 | """ | |
63 | Do explicit notebook names work? |
|
63 | Do explicit notebook names work? | |
64 | """ |
|
64 | """ | |
65 | with self.create_temp_cwd(['notebook*.ipynb']): |
|
65 | with self.create_temp_cwd(['notebook*.ipynb']): | |
66 | self.call('nbconvert --log-level 0 --to python notebook2') |
|
66 | self.call('nbconvert --log-level 0 --to python notebook2') | |
67 | assert not os.path.isfile('notebook1.py') |
|
67 | assert not os.path.isfile('notebook1.py') | |
68 | assert os.path.isfile('notebook2.py') |
|
68 | assert os.path.isfile('notebook2.py') | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | @dec.onlyif_cmds_exist('pdflatex') |
|
71 | @dec.onlyif_cmds_exist('pdflatex') | |
72 | @dec.onlyif_cmds_exist('pandoc') |
|
72 | @dec.onlyif_cmds_exist('pandoc') | |
73 | def test_filename_spaces(self): |
|
73 | def test_filename_spaces(self): | |
74 | """ |
|
74 | """ | |
75 | Generate PDFs with graphics if notebooks have spaces in the name? |
|
75 | Generate PDFs with graphics if notebooks have spaces in the name? | |
76 | """ |
|
76 | """ | |
77 | with self.create_temp_cwd(['notebook2.ipynb']): |
|
77 | with self.create_temp_cwd(['notebook2.ipynb']): | |
78 | os.rename('notebook2.ipynb', 'notebook with spaces.ipynb') |
|
78 | os.rename('notebook2.ipynb', 'notebook with spaces.ipynb') | |
79 | self.call('nbconvert --log-level 0 --to pdf' |
|
79 | self.call('nbconvert --log-level 0 --to pdf' | |
80 | ' "notebook with spaces"' |
|
80 | ' "notebook with spaces"' | |
81 | ' --PDFExporter.latex_count=1' |
|
81 | ' --PDFExporter.latex_count=1' | |
82 | ' --PDFExporter.verbose=True' |
|
82 | ' --PDFExporter.verbose=True' | |
83 | ) |
|
83 | ) | |
84 | assert os.path.isfile('notebook with spaces.pdf') |
|
84 | assert os.path.isfile('notebook with spaces.pdf') | |
85 |
|
85 | |||
86 | def test_post_processor(self): |
|
86 | def test_post_processor(self): | |
87 | """Do post processors work?""" |
|
87 | """Do post processors work?""" | |
88 | with self.create_temp_cwd(['notebook1.ipynb']): |
|
88 | with self.create_temp_cwd(['notebook1.ipynb']): | |
89 | out, err = self.call('nbconvert --log-level 0 --to python notebook1 ' |
|
89 | out, err = self.call('nbconvert --log-level 0 --to python notebook1 ' | |
90 | '--post IPython.nbconvert.tests.test_nbconvertapp.DummyPost') 
|
90 | '--post jupyter_nbconvert.tests.test_nbconvertapp.DummyPost') | |
91 | self.assertIn('Dummy:notebook1.py', out) |
|
91 | self.assertIn('Dummy:notebook1.py', out) | |
92 |
|
92 | |||
93 | @dec.onlyif_cmds_exist('pandoc') |
|
93 | @dec.onlyif_cmds_exist('pandoc') | |
94 | def test_spurious_cr(self): |
|
94 | def test_spurious_cr(self): | |
95 | """Check for extra CR characters""" |
|
95 | """Check for extra CR characters""" | |
96 | with self.create_temp_cwd(['notebook2.ipynb']): |
|
96 | with self.create_temp_cwd(['notebook2.ipynb']): | |
97 | self.call('nbconvert --log-level 0 --to latex notebook2') |
|
97 | self.call('nbconvert --log-level 0 --to latex notebook2') | |
98 | assert os.path.isfile('notebook2.tex') |
|
98 | assert os.path.isfile('notebook2.tex') | |
99 | with open('notebook2.tex') as f: |
|
99 | with open('notebook2.tex') as f: | |
100 | tex = f.read() |
|
100 | tex = f.read() | |
101 | self.call('nbconvert --log-level 0 --to html notebook2') |
|
101 | self.call('nbconvert --log-level 0 --to html notebook2') | |
102 | assert os.path.isfile('notebook2.html') |
|
102 | assert os.path.isfile('notebook2.html') | |
103 | with open('notebook2.html') as f: |
|
103 | with open('notebook2.html') as f: | |
104 | html = f.read() |
|
104 | html = f.read() | |
105 | self.assertEqual(tex.count('\r'), tex.count('\r\n')) |
|
105 | self.assertEqual(tex.count('\r'), tex.count('\r\n')) | |
106 | self.assertEqual(html.count('\r'), html.count('\r\n')) |
|
106 | self.assertEqual(html.count('\r'), html.count('\r\n')) | |
107 |
|
107 | |||
108 | @dec.onlyif_cmds_exist('pandoc') |
|
108 | @dec.onlyif_cmds_exist('pandoc') | |
109 | def test_png_base64_html_ok(self): |
|
109 | def test_png_base64_html_ok(self): | |
110 | """Is embedded png data well formed in HTML?""" |
|
110 | """Is embedded png data well formed in HTML?""" | |
111 | with self.create_temp_cwd(['notebook2.ipynb']): |
|
111 | with self.create_temp_cwd(['notebook2.ipynb']): | |
112 | self.call('nbconvert --log-level 0 --to HTML ' |
|
112 | self.call('nbconvert --log-level 0 --to HTML ' | |
113 | 'notebook2.ipynb --template full') |
|
113 | 'notebook2.ipynb --template full') | |
114 | assert os.path.isfile('notebook2.html') |
|
114 | assert os.path.isfile('notebook2.html') | |
115 | with open('notebook2.html') as f: |
|
115 | with open('notebook2.html') as f: | |
116 | assert "data:image/png;base64,b'" not in f.read() |
|
116 | assert "data:image/png;base64,b'" not in f.read() | |
117 |
|
117 | |||
118 | @dec.onlyif_cmds_exist('pandoc') |
|
118 | @dec.onlyif_cmds_exist('pandoc') | |
119 | def test_template(self): |
|
119 | def test_template(self): | |
120 | """ |
|
120 | """ | |
121 | Do export templates work? |
|
121 | Do export templates work? | |
122 | """ |
|
122 | """ | |
123 | with self.create_temp_cwd(['notebook2.ipynb']): |
|
123 | with self.create_temp_cwd(['notebook2.ipynb']): | |
124 | self.call('nbconvert --log-level 0 --to slides ' |
|
124 | self.call('nbconvert --log-level 0 --to slides ' | |
125 | 'notebook2.ipynb') |
|
125 | 'notebook2.ipynb') | |
126 | assert os.path.isfile('notebook2.slides.html') |
|
126 | assert os.path.isfile('notebook2.slides.html') | |
127 | with open('notebook2.slides.html') as f: |
|
127 | with open('notebook2.slides.html') as f: | |
128 | assert '/reveal.css' in f.read() |
|
128 | assert '/reveal.css' in f.read() | |
129 |
|
129 | |||
130 | def test_output_ext(self): |
|
130 | def test_output_ext(self): | |
131 | """test --output=outputfile[.ext]""" |
|
131 | """test --output=outputfile[.ext]""" | |
132 | with self.create_temp_cwd(['notebook1.ipynb']): |
|
132 | with self.create_temp_cwd(['notebook1.ipynb']): | |
133 | self.call('nbconvert --log-level 0 --to python ' |
|
133 | self.call('nbconvert --log-level 0 --to python ' | |
134 | 'notebook1.ipynb --output nb.py') |
|
134 | 'notebook1.ipynb --output nb.py') | |
135 | assert os.path.exists('nb.py') |
|
135 | assert os.path.exists('nb.py') | |
136 |
|
136 | |||
137 | self.call('nbconvert --log-level 0 --to python ' |
|
137 | self.call('nbconvert --log-level 0 --to python ' | |
138 | 'notebook1.ipynb --output nb2') |
|
138 | 'notebook1.ipynb --output nb2') | |
139 | assert os.path.exists('nb2.py') |
|
139 | assert os.path.exists('nb2.py') | |
140 |
|
140 | |||
141 | def test_glob_explicit(self): |
|
141 | def test_glob_explicit(self): | |
142 | """ |
|
142 | """ | |
143 | Can a search pattern be used along with matching explicit notebook names? |
|
143 | Can a search pattern be used along with matching explicit notebook names? | |
144 | """ |
|
144 | """ | |
145 | with self.create_temp_cwd(['notebook*.ipynb']): |
|
145 | with self.create_temp_cwd(['notebook*.ipynb']): | |
146 | self.call('nbconvert --log-level 0 --to python ' |
|
146 | self.call('nbconvert --log-level 0 --to python ' | |
147 | '*.ipynb notebook1.ipynb notebook2.ipynb') |
|
147 | '*.ipynb notebook1.ipynb notebook2.ipynb') | |
148 | assert os.path.isfile('notebook1.py') |
|
148 | assert os.path.isfile('notebook1.py') | |
149 | assert os.path.isfile('notebook2.py') |
|
149 | assert os.path.isfile('notebook2.py') | |
150 |
|
150 | |||
151 |
|
151 | |||
152 | def test_explicit_glob(self): |
|
152 | def test_explicit_glob(self): | |
153 | """ |
|
153 | """ | |
154 | Can explicit notebook names be used and then a matching search pattern? |
|
154 | Can explicit notebook names be used and then a matching search pattern? | |
155 | """ |
|
155 | """ | |
156 | with self.create_temp_cwd(['notebook*.ipynb']): |
|
156 | with self.create_temp_cwd(['notebook*.ipynb']): | |
157 | self.call('nbconvert --log-level 0 --to=python ' |
|
157 | self.call('nbconvert --log-level 0 --to=python ' | |
158 | 'notebook1.ipynb notebook2.ipynb *.ipynb') |
|
158 | 'notebook1.ipynb notebook2.ipynb *.ipynb') | |
159 | assert os.path.isfile('notebook1.py') |
|
159 | assert os.path.isfile('notebook1.py') | |
160 | assert os.path.isfile('notebook2.py') |
|
160 | assert os.path.isfile('notebook2.py') | |
161 |
|
161 | |||
162 |
|
162 | |||
163 | def test_default_config(self): |
|
163 | def test_default_config(self): | |
164 | """ |
|
164 | """ | |
165 | Does the default config work? |
|
165 | Does the default config work? | |
166 | """ |
|
166 | """ | |
167 | with self.create_temp_cwd(['notebook*.ipynb', 'ipython_nbconvert_config.py']): |
|
167 | with self.create_temp_cwd(['notebook*.ipynb', 'ipython_nbconvert_config.py']): | |
168 | self.call('nbconvert --log-level 0') |
|
168 | self.call('nbconvert --log-level 0') | |
169 | assert os.path.isfile('notebook1.py') |
|
169 | assert os.path.isfile('notebook1.py') | |
170 | assert not os.path.isfile('notebook2.py') |
|
170 | assert not os.path.isfile('notebook2.py') | |
171 |
|
171 | |||
172 |
|
172 | |||
173 | def test_override_config(self): |
|
173 | def test_override_config(self): | |
174 | """ |
|
174 | """ | |
175 | Can the default config be overridden? 

175 | Can the default config be overridden? | 
176 | """ |
|
176 | """ | |
177 | with self.create_temp_cwd(['notebook*.ipynb', |
|
177 | with self.create_temp_cwd(['notebook*.ipynb', | |
178 | 'ipython_nbconvert_config.py', |
|
178 | 'ipython_nbconvert_config.py', | |
179 | 'override.py']): |
|
179 | 'override.py']): | |
180 | self.call('nbconvert --log-level 0 --config="override.py"') |
|
180 | self.call('nbconvert --log-level 0 --config="override.py"') | |
181 | assert not os.path.isfile('notebook1.py') |
|
181 | assert not os.path.isfile('notebook1.py') | |
182 | assert os.path.isfile('notebook2.py') |
|
182 | assert os.path.isfile('notebook2.py') | |
183 |
|
183 | |||
184 | def test_accents_in_filename(self): |
|
184 | def test_accents_in_filename(self): | |
185 | """ |
|
185 | """ | |
186 | Can notebook names include accents? |
|
186 | Can notebook names include accents? | |
187 | """ |
|
187 | """ | |
188 | with self.create_temp_cwd(): |
|
188 | with self.create_temp_cwd(): | |
189 | self.create_empty_notebook(u'nb1_anΓ‘lisis.ipynb') |
|
189 | self.create_empty_notebook(u'nb1_anΓ‘lisis.ipynb') | |
190 | self.call('nbconvert --log-level 0 --to python nb1_*') |
|
190 | self.call('nbconvert --log-level 0 --to python nb1_*') | |
191 | assert os.path.isfile(u'nb1_anΓ‘lisis.py') |
|
191 | assert os.path.isfile(u'nb1_anΓ‘lisis.py') | |
192 |
|
192 | |||
193 | @dec.onlyif_cmds_exist('pdflatex', 'pandoc') |
|
193 | @dec.onlyif_cmds_exist('pdflatex', 'pandoc') | |
194 | def test_filename_accent_pdf(self): |
|
194 | def test_filename_accent_pdf(self): | |
195 | """ |
|
195 | """ | |
196 | Generate PDFs if notebooks have an accent in their name? |
|
196 | Generate PDFs if notebooks have an accent in their name? | |
197 | """ |
|
197 | """ | |
198 | with self.create_temp_cwd(): |
|
198 | with self.create_temp_cwd(): | |
199 | self.create_empty_notebook(u'nb1_anΓ‘lisis.ipynb') |
|
199 | self.create_empty_notebook(u'nb1_anΓ‘lisis.ipynb') | |
200 | self.call('nbconvert --log-level 0 --to pdf "nb1_*"' |
|
200 | self.call('nbconvert --log-level 0 --to pdf "nb1_*"' | |
201 | ' --PDFExporter.latex_count=1' |
|
201 | ' --PDFExporter.latex_count=1' | |
202 | ' --PDFExporter.verbose=True') |
|
202 | ' --PDFExporter.verbose=True') | |
203 | assert os.path.isfile(u'nb1_anΓ‘lisis.pdf') |
|
203 | assert os.path.isfile(u'nb1_anΓ‘lisis.pdf') | |
204 |
|
204 | |||
205 | def test_cwd_plugin(self): |
|
205 | def test_cwd_plugin(self): | |
206 | """ |
|
206 | """ | |
207 | Verify that an extension in the cwd can be imported. |
|
207 | Verify that an extension in the cwd can be imported. | |
208 | """ |
|
208 | """ | |
209 | with self.create_temp_cwd(['hello.py']): |
|
209 | with self.create_temp_cwd(['hello.py']): | |
210 | self.create_empty_notebook(u'empty.ipynb') |
|
210 | self.create_empty_notebook(u'empty.ipynb') | |
211 | self.call('nbconvert empty --to html --NbConvertApp.writer_class=\'hello.HelloWriter\'') |
|
211 | self.call('nbconvert empty --to html --NbConvertApp.writer_class=\'hello.HelloWriter\'') | |
212 | assert os.path.isfile(u'hello.txt') |
|
212 | assert os.path.isfile(u'hello.txt') | |
213 |
|
213 | |||
214 | def test_output_suffix(self): |
|
214 | def test_output_suffix(self): | |
215 | """ |
|
215 | """ | |
216 | Verify that the output suffix is applied |
|
216 | Verify that the output suffix is applied | |
217 | """ |
|
217 | """ | |
218 | with self.create_temp_cwd(): |
|
218 | with self.create_temp_cwd(): | |
219 | self.create_empty_notebook('empty.ipynb') |
|
219 | self.create_empty_notebook('empty.ipynb') | |
220 | self.call('nbconvert empty.ipynb --to notebook') |
|
220 | self.call('nbconvert empty.ipynb --to notebook') | |
221 | assert os.path.isfile('empty.nbconvert.ipynb') |
|
221 | assert os.path.isfile('empty.nbconvert.ipynb') | |
222 |
|
222 | |||
223 | def test_different_build_dir(self): |
|
223 | def test_different_build_dir(self): | |
224 | """ |
|
224 | """ | |
225 | Verify that the output suffix is not applied |
|
225 | Verify that the output suffix is not applied | |
226 | """ |
|
226 | """ | |
227 | with self.create_temp_cwd(): |
|
227 | with self.create_temp_cwd(): | |
228 | self.create_empty_notebook('empty.ipynb') |
|
228 | self.create_empty_notebook('empty.ipynb') | |
229 | os.mkdir('output') |
|
229 | os.mkdir('output') | |
230 | self.call( |
|
230 | self.call( | |
231 | 'nbconvert empty.ipynb --to notebook ' |
|
231 | 'nbconvert empty.ipynb --to notebook ' | |
232 | '--FilesWriter.build_directory=output') |
|
232 | '--FilesWriter.build_directory=output') | |
233 | assert os.path.isfile('output/empty.ipynb') |
|
233 | assert os.path.isfile('output/empty.ipynb') | |
234 |
|
234 | |||
235 | def test_inplace(self): |
|
235 | def test_inplace(self): | |
236 | """ |
|
236 | """ | |
237 | Verify that the notebook is converted in place |
|
237 | Verify that the notebook is converted in place | |
238 | """ |
|
238 | """ | |
239 | with self.create_temp_cwd(): |
|
239 | with self.create_temp_cwd(): | |
240 | self.create_empty_notebook('empty.ipynb') |
|
240 | self.create_empty_notebook('empty.ipynb') | |
241 | self.call('nbconvert empty.ipynb --to notebook --inplace') |
|
241 | self.call('nbconvert empty.ipynb --to notebook --inplace') | |
242 | assert os.path.isfile('empty.ipynb') |
|
242 | assert os.path.isfile('empty.ipynb') | |
243 | assert not os.path.isfile('empty.nbconvert.ipynb') |
|
243 | assert not os.path.isfile('empty.nbconvert.ipynb') |
@@ -1,131 +1,131 b'' | |||||
1 | """Test lexers module""" |
|
1 | """Test lexers module""" | |
2 | #----------------------------------------------------------------------------- |
|
2 | #----------------------------------------------------------------------------- | |
3 | # Copyright (C) 2014 The IPython Development Team |
|
3 | # Copyright (C) 2014 The IPython Development Team | |
4 | # |
|
4 | # | |
5 | # Distributed under the terms of the BSD License. The full license is in |
|
5 | # Distributed under the terms of the BSD License. The full license is in | |
6 | # the file COPYING, distributed as part of this software. |
|
6 | # the file COPYING, distributed as part of this software. | |
7 | #----------------------------------------------------------------------------- |
|
7 | #----------------------------------------------------------------------------- | |
8 |
|
8 | |||
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 | # Imports |
|
10 | # Imports | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | from pygments.token import Token |
|
12 | from pygments.token import Token | |
13 |
|
13 | |||
14 | from IPython.nbconvert.tests.base import TestsBase 
|
14 | from jupyter_nbconvert.tests.base import TestsBase | |
15 | from .. import lexers |
|
15 | from .. import lexers | |
16 |
|
16 | |||
17 |
|
17 | |||
18 | #----------------------------------------------------------------------------- |
|
18 | #----------------------------------------------------------------------------- | |
19 | # Classes and functions |
|
19 | # Classes and functions | |
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 | class TestLexers(TestsBase): |
|
21 | class TestLexers(TestsBase): | |
22 | """Collection of lexers tests""" |
|
22 | """Collection of lexers tests""" | |
23 | def setUp(self): |
|
23 | def setUp(self): | |
24 | self.lexer = lexers.IPythonLexer() |
|
24 | self.lexer = lexers.IPythonLexer() | |
25 |
|
25 | |||
26 | def testIPythonLexer(self): |
|
26 | def testIPythonLexer(self): | |
27 | fragment = '!echo $HOME\n' |
|
27 | fragment = '!echo $HOME\n' | |
28 | tokens = [ |
|
28 | tokens = [ | |
29 | (Token.Operator, '!'), |
|
29 | (Token.Operator, '!'), | |
30 | (Token.Name.Builtin, 'echo'), |
|
30 | (Token.Name.Builtin, 'echo'), | |
31 | (Token.Text, ' '), |
|
31 | (Token.Text, ' '), | |
32 | (Token.Name.Variable, '$HOME'), |
|
32 | (Token.Name.Variable, '$HOME'), | |
33 | (Token.Text, '\n'), |
|
33 | (Token.Text, '\n'), | |
34 | ] |
|
34 | ] | |
35 | self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) |
|
35 | self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) | |
36 |
|
36 | |||
37 | fragment_2 = '!' + fragment |
|
37 | fragment_2 = '!' + fragment | |
38 | tokens_2 = [ |
|
38 | tokens_2 = [ | |
39 | (Token.Operator, '!!'), |
|
39 | (Token.Operator, '!!'), | |
40 | ] + tokens[1:] |
|
40 | ] + tokens[1:] | |
41 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
41 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
42 |
|
42 | |||
43 | fragment_2 = '\t %%!\n' + fragment[1:] |
|
43 | fragment_2 = '\t %%!\n' + fragment[1:] | |
44 | tokens_2 = [ |
|
44 | tokens_2 = [ | |
45 | (Token.Text, '\t '), |
|
45 | (Token.Text, '\t '), | |
46 | (Token.Operator, '%%!'), |
|
46 | (Token.Operator, '%%!'), | |
47 | (Token.Text, '\n'), |
|
47 | (Token.Text, '\n'), | |
48 | ] + tokens[1:] |
|
48 | ] + tokens[1:] | |
49 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
49 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
50 |
|
50 | |||
51 | fragment_2 = 'x = ' + fragment |
|
51 | fragment_2 = 'x = ' + fragment | |
52 | tokens_2 = [ |
|
52 | tokens_2 = [ | |
53 | (Token.Name, 'x'), |
|
53 | (Token.Name, 'x'), | |
54 | (Token.Text, ' '), |
|
54 | (Token.Text, ' '), | |
55 | (Token.Operator, '='), |
|
55 | (Token.Operator, '='), | |
56 | (Token.Text, ' '), |
|
56 | (Token.Text, ' '), | |
57 | ] + tokens |
|
57 | ] + tokens | |
58 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
58 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
59 |
|
59 | |||
60 | fragment_2 = 'x, = ' + fragment |
|
60 | fragment_2 = 'x, = ' + fragment | |
61 | tokens_2 = [ |
|
61 | tokens_2 = [ | |
62 | (Token.Name, 'x'), |
|
62 | (Token.Name, 'x'), | |
63 | (Token.Punctuation, ','), |
|
63 | (Token.Punctuation, ','), | |
64 | (Token.Text, ' '), |
|
64 | (Token.Text, ' '), | |
65 | (Token.Operator, '='), |
|
65 | (Token.Operator, '='), | |
66 | (Token.Text, ' '), |
|
66 | (Token.Text, ' '), | |
67 | ] + tokens |
|
67 | ] + tokens | |
68 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
68 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
69 |
|
69 | |||
70 | fragment_2 = 'x, = %sx ' + fragment[1:] |
|
70 | fragment_2 = 'x, = %sx ' + fragment[1:] | |
71 | tokens_2 = [ |
|
71 | tokens_2 = [ | |
72 | (Token.Name, 'x'), |
|
72 | (Token.Name, 'x'), | |
73 | (Token.Punctuation, ','), |
|
73 | (Token.Punctuation, ','), | |
74 | (Token.Text, ' '), |
|
74 | (Token.Text, ' '), | |
75 | (Token.Operator, '='), |
|
75 | (Token.Operator, '='), | |
76 | (Token.Text, ' '), |
|
76 | (Token.Text, ' '), | |
77 | (Token.Operator, '%'), |
|
77 | (Token.Operator, '%'), | |
78 | (Token.Keyword, 'sx'), |
|
78 | (Token.Keyword, 'sx'), | |
79 | (Token.Text, ' '), |
|
79 | (Token.Text, ' '), | |
80 | ] + tokens[1:] |
|
80 | ] + tokens[1:] | |
81 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
81 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
82 |
|
82 | |||
83 | fragment_2 = 'f = %R function () {}\n' |
|
83 | fragment_2 = 'f = %R function () {}\n' | |
84 | tokens_2 = [ |
|
84 | tokens_2 = [ | |
85 | (Token.Name, 'f'), |
|
85 | (Token.Name, 'f'), | |
86 | (Token.Text, ' '), |
|
86 | (Token.Text, ' '), | |
87 | (Token.Operator, '='), |
|
87 | (Token.Operator, '='), | |
88 | (Token.Text, ' '), |
|
88 | (Token.Text, ' '), | |
89 | (Token.Operator, '%'), |
|
89 | (Token.Operator, '%'), | |
90 | (Token.Keyword, 'R'), |
|
90 | (Token.Keyword, 'R'), | |
91 | (Token.Text, ' function () {}\n'), |
|
91 | (Token.Text, ' function () {}\n'), | |
92 | ] |
|
92 | ] | |
93 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
93 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
94 |
|
94 | |||
95 | fragment_2 = '\t%%xyz\n$foo\n' |
|
95 | fragment_2 = '\t%%xyz\n$foo\n' | |
96 | tokens_2 = [ |
|
96 | tokens_2 = [ | |
97 | (Token.Text, '\t'), |
|
97 | (Token.Text, '\t'), | |
98 | (Token.Operator, '%%'), |
|
98 | (Token.Operator, '%%'), | |
99 | (Token.Keyword, 'xyz'), |
|
99 | (Token.Keyword, 'xyz'), | |
100 | (Token.Text, '\n$foo\n'), |
|
100 | (Token.Text, '\n$foo\n'), | |
101 | ] |
|
101 | ] | |
102 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
102 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
103 |
|
103 | |||
104 | fragment_2 = '%system?\n' |
|
104 | fragment_2 = '%system?\n' | |
105 | tokens_2 = [ |
|
105 | tokens_2 = [ | |
106 | (Token.Operator, '%'), |
|
106 | (Token.Operator, '%'), | |
107 | (Token.Keyword, 'system'), |
|
107 | (Token.Keyword, 'system'), | |
108 | (Token.Operator, '?'), |
|
108 | (Token.Operator, '?'), | |
109 | (Token.Text, '\n'), |
|
109 | (Token.Text, '\n'), | |
110 | ] |
|
110 | ] | |
111 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
111 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
112 |
|
112 | |||
113 | fragment_2 = 'x != y\n' |
|
113 | fragment_2 = 'x != y\n' | |
114 | tokens_2 = [ |
|
114 | tokens_2 = [ | |
115 | (Token.Name, 'x'), |
|
115 | (Token.Name, 'x'), | |
116 | (Token.Text, ' '), |
|
116 | (Token.Text, ' '), | |
117 | (Token.Operator, '!='), |
|
117 | (Token.Operator, '!='), | |
118 | (Token.Text, ' '), |
|
118 | (Token.Text, ' '), | |
119 | (Token.Name, 'y'), |
|
119 | (Token.Name, 'y'), | |
120 | (Token.Text, '\n'), |
|
120 | (Token.Text, '\n'), | |
121 | ] |
|
121 | ] | |
122 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
122 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) | |
123 |
|
123 | |||
124 | fragment_2 = ' ?math.sin\n' |
|
124 | fragment_2 = ' ?math.sin\n' | |
125 | tokens_2 = [ |
|
125 | tokens_2 = [ | |
126 | (Token.Text, ' '), |
|
126 | (Token.Text, ' '), | |
127 | (Token.Operator, '?'), |
|
127 | (Token.Operator, '?'), | |
128 | (Token.Text, 'math.sin'), |
|
128 | (Token.Text, 'math.sin'), | |
129 | (Token.Text, '\n'), |
|
129 | (Token.Text, '\n'), | |
130 | ] |
|
130 | ] | |
131 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
|
131 | self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2))) |
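The lexer assertions above all follow the same pattern: build an expected token list and compare it with what IPythonLexer.get_tokens yields. Outside the test suite the lexer can be used like any other Pygments lexer; the absolute import path below is an assumption, since the test file only reaches the module through the relative import 'from .. import lexers'.

    # Hedged sketch, not part of the changeset. The absolute module path is assumed.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from jupyter_nbconvert.utils.lexers import IPythonLexer

    fragment = 'x, = %sx !echo $HOME\n'
    lexer = IPythonLexer()

    # Inspect the raw token stream, as the assertions above do ...
    for token_type, value in lexer.get_tokens(fragment):
        print(token_type, repr(value))

    # ... or hand the lexer to a Pygments formatter for highlighting.
    print(highlight(fragment, lexer, TerminalFormatter()))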
@@ -1,70 +1,70 b'' | |||||
1 | """Test Pandoc module""" |
|
1 | """Test Pandoc module""" | |
2 | #----------------------------------------------------------------------------- |
|
2 | #----------------------------------------------------------------------------- | |
3 | # Copyright (C) 2014 The IPython Development Team |
|
3 | # Copyright (C) 2014 The IPython Development Team | |
4 | # |
|
4 | # | |
5 | # Distributed under the terms of the BSD License. The full license is in |
|
5 | # Distributed under the terms of the BSD License. The full license is in | |
6 | # the file COPYING, distributed as part of this software. |
|
6 | # the file COPYING, distributed as part of this software. | |
7 | #----------------------------------------------------------------------------- |
|
7 | #----------------------------------------------------------------------------- | |
8 |
|
8 | |||
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 | # Imports |
|
10 | # Imports | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | import os |
|
12 | import os | |
13 | import warnings |
|
13 | import warnings | |
14 |
|
14 | |||
15 | from IPython.testing import decorators as dec |
|
15 | from IPython.testing import decorators as dec | |
16 |
|
16 | |||
17 | from IPython.nbconvert.tests.base import TestsBase 
|
17 | from jupyter_nbconvert.tests.base import TestsBase | |
18 | from .. import pandoc |
|
18 | from .. import pandoc | |
19 |
|
19 | |||
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 | # Classes and functions |
|
21 | # Classes and functions | |
22 | #----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
23 | class TestPandoc(TestsBase): |
|
23 | class TestPandoc(TestsBase): | |
24 | """Collection of Pandoc tests""" |
|
24 | """Collection of Pandoc tests""" | |
25 |
|
25 | |||
26 | def __init__(self, *args, **kwargs): |
|
26 | def __init__(self, *args, **kwargs): | |
27 | super(TestPandoc, self).__init__(*args, **kwargs) |
|
27 | super(TestPandoc, self).__init__(*args, **kwargs) | |
28 | self.original_env = os.environ.copy() |
|
28 | self.original_env = os.environ.copy() | |
29 |
|
29 | |||
30 | @dec.onlyif_cmds_exist('pandoc') |
|
30 | @dec.onlyif_cmds_exist('pandoc') | |
31 | def test_pandoc_available(self): |
|
31 | def test_pandoc_available(self): | |
32 | """ Test behaviour that pandoc functions raise PandocMissing as documented """ |
|
32 | """ Test behaviour that pandoc functions raise PandocMissing as documented """ | |
33 | pandoc.clean_cache() |
|
33 | pandoc.clean_cache() | |
34 |
|
34 | |||
35 | os.environ["PATH"] = "" |
|
35 | os.environ["PATH"] = "" | |
36 | with self.assertRaises(pandoc.PandocMissing): |
|
36 | with self.assertRaises(pandoc.PandocMissing): | |
37 | pandoc.get_pandoc_version() |
|
37 | pandoc.get_pandoc_version() | |
38 | with self.assertRaises(pandoc.PandocMissing): |
|
38 | with self.assertRaises(pandoc.PandocMissing): | |
39 | pandoc.check_pandoc_version() |
|
39 | pandoc.check_pandoc_version() | |
40 | with self.assertRaises(pandoc.PandocMissing): |
|
40 | with self.assertRaises(pandoc.PandocMissing): | |
41 | pandoc.pandoc("", "markdown", "html") |
|
41 | pandoc.pandoc("", "markdown", "html") | |
42 |
|
42 | |||
43 | # original_env["PATH"] should contain pandoc |
|
43 | # original_env["PATH"] should contain pandoc | |
44 | os.environ["PATH"] = self.original_env["PATH"] |
|
44 | os.environ["PATH"] = self.original_env["PATH"] | |
45 | with warnings.catch_warnings(record=True) as w: |
|
45 | with warnings.catch_warnings(record=True) as w: | |
46 | pandoc.get_pandoc_version() |
|
46 | pandoc.get_pandoc_version() | |
47 | pandoc.check_pandoc_version() |
|
47 | pandoc.check_pandoc_version() | |
48 | pandoc.pandoc("", "markdown", "html") |
|
48 | pandoc.pandoc("", "markdown", "html") | |
49 | self.assertEqual(w, []) |
|
49 | self.assertEqual(w, []) | |
50 |
|
50 | |||
51 | @dec.onlyif_cmds_exist('pandoc') |
|
51 | @dec.onlyif_cmds_exist('pandoc') | |
52 | def test_minimal_version(self): |
|
52 | def test_minimal_version(self): | |
53 | original_minversion = pandoc._minimal_version |
|
53 | original_minversion = pandoc._minimal_version | |
54 |
|
54 | |||
55 | pandoc._minimal_version = "120.0" |
|
55 | pandoc._minimal_version = "120.0" | |
56 | with warnings.catch_warnings(record=True) as w: |
|
56 | with warnings.catch_warnings(record=True) as w: | |
57 | assert not pandoc.check_pandoc_version() |
|
57 | assert not pandoc.check_pandoc_version() | |
58 | self.assertEqual(len(w), 1) |
|
58 | self.assertEqual(len(w), 1) | |
59 |
|
59 | |||
60 | pandoc._minimal_version = pandoc.get_pandoc_version() |
|
60 | pandoc._minimal_version = pandoc.get_pandoc_version() | |
61 | assert pandoc.check_pandoc_version() |
|
61 | assert pandoc.check_pandoc_version() | |
62 |
|
62 | |||
63 |
|
63 | |||
64 | def pandoc_function_raised_missing(f, *args, **kwargs): |
|
64 | def pandoc_function_raised_missing(f, *args, **kwargs): | |
65 | try: |
|
65 | try: | |
66 | f(*args, **kwargs) |
|
66 | f(*args, **kwargs) | |
67 | except pandoc.PandocMissing: |
|
67 | except pandoc.PandocMissing: | |
68 | return True |
|
68 | return True | |
69 | else: |
|
69 | else: | |
70 | return False |
|
70 | return False |
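For reference, the pandoc helpers exercised above can also be called directly; each entry point either succeeds or raises PandocMissing when no pandoc executable is on PATH. The absolute module path below is an assumption, since the test file imports the module relatively as 'from .. import pandoc'.

    # Hedged sketch, not part of the changeset. The absolute module path is assumed.
    from jupyter_nbconvert.utils import pandoc

    try:
        print('pandoc version:', pandoc.get_pandoc_version())
        # Returns False (and emits a warning) when the installed pandoc is
        # older than the module-level _minimal_version, as test_minimal_version
        # above demonstrates.
        print('recent enough:', pandoc.check_pandoc_version())
        print(pandoc.pandoc('*hello*', 'markdown', 'html'))
    except pandoc.PandocMissing:
        # Raised when no pandoc executable can be found on PATH, which is what
        # test_pandoc_available asserts after blanking out PATH.
        print('pandoc is not installed')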