@@ -1,149 +1,149 @@
 """Tornado handlers for nbconvert."""

 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.

 import io
 import os
 import zipfile

 from tornado import web

 from ..base.handlers import (
     IPythonHandler, FilesRedirectHandler,
     notebook_path_regex, path_regex,
 )
 from IPython.nbformat import from_dict

 from IPython.utils.py3compat import cast_bytes

 def find_resource_files(output_files_dir):
     files = []
     for dirpath, dirnames, filenames in os.walk(output_files_dir):
         files.extend([os.path.join(dirpath, f) for f in filenames])
     return files

 def respond_zip(handler, name, output, resources):
     """Zip up the output and resource files and respond with the zip file.

     Returns True if it has served a zip file, False if there are no resource
     files, in which case we serve the plain output file.
     """
     # Check if we have resource files we need to zip
     output_files = resources.get('outputs', None)
     if not output_files:
         return False

     # Headers
     zip_filename = os.path.splitext(name)[0] + '.zip'
     handler.set_header('Content-Disposition',
                        'attachment; filename="%s"' % zip_filename)
     handler.set_header('Content-Type', 'application/zip')

     # Prepare the zip file
     buffer = io.BytesIO()
     zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
-    output_filename = os.path.splitext(name)[0] +
+    output_filename = os.path.splitext(name)[0] + resources['output_extension']
     zipf.writestr(output_filename, cast_bytes(output, 'utf-8'))
     for filename, data in output_files.items():
         zipf.writestr(os.path.basename(filename), data)
     zipf.close()

     handler.finish(buffer.getvalue())
     return True

 def get_exporter(format, **kwargs):
     """get an exporter, raising appropriate errors"""
     # if this fails, will raise 500
     try:
         from IPython.nbconvert.exporters.export import exporter_map
     except ImportError as e:
         raise web.HTTPError(500, "Could not import nbconvert: %s" % e)

     try:
         Exporter = exporter_map[format]
     except KeyError:
         # should this be 400?
         raise web.HTTPError(404, u"No exporter for format: %s" % format)

     try:
         return Exporter(**kwargs)
     except Exception as e:
         raise web.HTTPError(500, "Could not construct Exporter: %s" % e)

 class NbconvertFileHandler(IPythonHandler):

     SUPPORTED_METHODS = ('GET',)

     @web.authenticated
     def get(self, format, path):

         exporter = get_exporter(format, config=self.config, log=self.log)

         path = path.strip('/')
         model = self.contents_manager.get(path=path)
         name = model['name']

         self.set_header('Last-Modified', model['last_modified'])

         try:
             output, resources = exporter.from_notebook_node(model['content'])
         except Exception as e:
             raise web.HTTPError(500, "nbconvert failed: %s" % e)

         if respond_zip(self, name, output, resources):
             return

         # Force download if requested
         if self.get_argument('download', 'false').lower() == 'true':
-            filename = os.path.splitext(name)[0] +
+            filename = os.path.splitext(name)[0] + resources['output_extension']
             self.set_header('Content-Disposition',
                             'attachment; filename="%s"' % filename)

         # MIME type
         if exporter.output_mimetype:
             self.set_header('Content-Type',
                             '%s; charset=utf-8' % exporter.output_mimetype)

         self.finish(output)

 class NbconvertPostHandler(IPythonHandler):
     SUPPORTED_METHODS = ('POST',)

     @web.authenticated
     def post(self, format):
         exporter = get_exporter(format, config=self.config)

         model = self.get_json_body()
         name = model.get('name', 'notebook.ipynb')
         nbnode = from_dict(model['content'])

         try:
             output, resources = exporter.from_notebook_node(nbnode)
         except Exception as e:
             raise web.HTTPError(500, "nbconvert failed: %s" % e)

         if respond_zip(self, name, output, resources):
             return

         # MIME type
         if exporter.output_mimetype:
             self.set_header('Content-Type',
                             '%s; charset=utf-8' % exporter.output_mimetype)

         self.finish(output)


 #-----------------------------------------------------------------------------
 # URL to handler mappings
 #-----------------------------------------------------------------------------

 _format_regex = r"(?P<format>\w+)"


 default_handlers = [
     (r"/nbconvert/%s%s" % (_format_regex, notebook_path_regex),
      NbconvertFileHandler),
     (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler),
     (r"/nbconvert/html%s" % path_regex, FilesRedirectHandler),
 ]
@@ -1,383 +1,383 @@
 // Copyright (c) IPython Development Team.
 // Distributed under the terms of the Modified BSD License.

 define([
     'jquery',
     'base/js/namespace',
     'base/js/dialog',
     'base/js/utils',
     'notebook/js/tour',
     'bootstrap',
     'moment',
 ], function($, IPython, dialog, utils, tour, bootstrap, moment) {
     "use strict";

     var MenuBar = function (selector, options) {
         // Constructor
         //
         // A MenuBar Class to generate the menubar of IPython notebook
         //
         // Parameters:
         //  selector: string
         //  options: dictionary
         //      Dictionary of keyword arguments.
         //          notebook: Notebook instance
         //          contents: ContentManager instance
         //          layout_manager: LayoutManager instance
         //          events: $(Events) instance
         //          save_widget: SaveWidget instance
         //          quick_help: QuickHelp instance
         //          base_url : string
         //          notebook_path : string
         //          notebook_name : string
         options = options || {};
         this.base_url = options.base_url || utils.get_body_data("baseUrl");
         this.selector = selector;
         this.notebook = options.notebook;
         this.contents = options.contents;
         this.layout_manager = options.layout_manager;
         this.events = options.events;
         this.save_widget = options.save_widget;
         this.quick_help = options.quick_help;

         try {
             this.tour = new tour.Tour(this.notebook, this.events);
         } catch (e) {
             this.tour = undefined;
             console.log("Failed to instantiate Notebook Tour", e);
         }

         if (this.selector !== undefined) {
             this.element = $(selector);
             this.style();
             this.bind_events();
         }
     };

     // TODO: This has definitively nothing to do with style ...
     MenuBar.prototype.style = function () {
         var that = this;
         this.element.find("li").click(function (event, ui) {
                 // The selected cell loses focus when the menu is entered, so we
                 // re-select it upon selection.
                 var i = that.notebook.get_selected_index();
                 that.notebook.select(i);
             }
         );
     };

     MenuBar.prototype._nbconvert = function (format, download) {
         download = download || false;
         var notebook_path = this.notebook.notebook_path;
         var url = utils.url_join_encode(
             this.base_url,
             'nbconvert',
             format,
             notebook_path
         ) + "?download=" + download.toString();

         var w = window.open()
         if (this.notebook.dirty) {
             this.notebook.save_notebook().then(function() {
                 w.location = url;
             });
         } else {
             w.location = url;
         }
     };

     MenuBar.prototype.bind_events = function () {
         // File
         var that = this;
         this.element.find('#new_notebook').click(function () {
             var w = window.open();
             // Create a new notebook in the same path as the current
             // notebook's path.
             var parent = utils.url_path_split(that.notebook.notebook_path)[0];
             that.contents.new_untitled(parent, {type: "notebook"}).then(
                 function (data) {
                     w.location = utils.url_join_encode(
                         that.base_url, 'notebooks', data.path
                     );
                 },
                 function(error) {
                     w.close();
                     dialog.modal({
                         title : 'Creating Notebook Failed',
                         body : "The error was: " + error.message,
                         buttons : {'OK' : {'class' : 'btn-primary'}}
                     });
                 }
             );
         });
         this.element.find('#open_notebook').click(function () {
             var parent = utils.url_path_split(that.notebook.notebook_path)[0];
             window.open(utils.url_join_encode(that.base_url, 'tree', parent));
         });
         this.element.find('#copy_notebook').click(function () {
             that.notebook.copy_notebook();
             return false;
         });
         this.element.find('#download_ipynb').click(function () {
             var base_url = that.notebook.base_url;
             var notebook_path = that.notebook.notebook_path;
             if (that.notebook.dirty) {
                 that.notebook.save_notebook({async : false});
             }

             var url = utils.url_join_encode(base_url, 'files', notebook_path);
             window.open(url + '?download=1');
         });

         this.element.find('#print_preview').click(function () {
             that._nbconvert('html', false);
         });

         this.element.find('#download_html').click(function () {
             that._nbconvert('html', true);
         });

         this.element.find('#download_rst').click(function () {
             that._nbconvert('rst', true);
         });

         this.element.find('#download_pdf').click(function () {
             that._nbconvert('pdf', true);
         });

         this.element.find('#rename_notebook').click(function () {
             that.save_widget.rename_notebook({notebook: that.notebook});
         });
         this.element.find('#save_checkpoint').click(function () {
             that.notebook.save_checkpoint();
         });
         this.element.find('#restore_checkpoint').click(function () {
         });
         this.element.find('#trust_notebook').click(function () {
             that.notebook.trust_notebook();
         });
         this.events.on('trust_changed.Notebook', function (event, trusted) {
             if (trusted) {
                 that.element.find('#trust_notebook')
                     .addClass("disabled")
                     .find("a").text("Trusted Notebook");
             } else {
                 that.element.find('#trust_notebook')
                     .removeClass("disabled")
                     .find("a").text("Trust Notebook");
             }
         });
         this.element.find('#kill_and_exit').click(function () {
             var close_window = function () {
                 // allow closing of new tabs in Chromium, impossible in FF
                 window.open('', '_self', '');
                 window.close();
             };
             // finish with close on success or failure
             that.notebook.session.delete(close_window, close_window);
         });
         // Edit
         this.element.find('#cut_cell').click(function () {
             that.notebook.cut_cell();
         });
         this.element.find('#copy_cell').click(function () {
             that.notebook.copy_cell();
         });
         this.element.find('#delete_cell').click(function () {
             that.notebook.delete_cell();
         });
         this.element.find('#undelete_cell').click(function () {
             that.notebook.undelete_cell();
         });
         this.element.find('#split_cell').click(function () {
             that.notebook.split_cell();
         });
         this.element.find('#merge_cell_above').click(function () {
             that.notebook.merge_cell_above();
         });
         this.element.find('#merge_cell_below').click(function () {
             that.notebook.merge_cell_below();
         });
         this.element.find('#move_cell_up').click(function () {
             that.notebook.move_cell_up();
         });
         this.element.find('#move_cell_down').click(function () {
             that.notebook.move_cell_down();
         });
         this.element.find('#edit_nb_metadata').click(function () {
             that.notebook.edit_metadata({
                 notebook: that.notebook,
                 keyboard_manager: that.notebook.keyboard_manager});
         });

         // View
         this.element.find('#toggle_header').click(function () {
             $('div#header').toggle();
             that.layout_manager.do_resize();
         });
         this.element.find('#toggle_toolbar').click(function () {
             $('div#maintoolbar').toggle();
             that.layout_manager.do_resize();
         });
         // Insert
         this.element.find('#insert_cell_above').click(function () {
             that.notebook.insert_cell_above('code');
             that.notebook.select_prev();
         });
         this.element.find('#insert_cell_below').click(function () {
             that.notebook.insert_cell_below('code');
             that.notebook.select_next();
         });
         // Cell
         this.element.find('#run_cell').click(function () {
             that.notebook.execute_cell();
         });
         this.element.find('#run_cell_select_below').click(function () {
             that.notebook.execute_cell_and_select_below();
         });
         this.element.find('#run_cell_insert_below').click(function () {
             that.notebook.execute_cell_and_insert_below();
         });
         this.element.find('#run_all_cells').click(function () {
             that.notebook.execute_all_cells();
         });
         this.element.find('#run_all_cells_above').click(function () {
             that.notebook.execute_cells_above();
         });
         this.element.find('#run_all_cells_below').click(function () {
             that.notebook.execute_cells_below();
         });
         this.element.find('#to_code').click(function () {
             that.notebook.to_code();
         });
         this.element.find('#to_markdown').click(function () {
             that.notebook.to_markdown();
         });
         this.element.find('#to_raw').click(function () {
             that.notebook.to_raw();
         });

         this.element.find('#toggle_current_output').click(function () {
             that.notebook.toggle_output();
         });
         this.element.find('#toggle_current_output_scroll').click(function () {
             that.notebook.toggle_output_scroll();
         });
         this.element.find('#clear_current_output').click(function () {
             that.notebook.clear_output();
         });

         this.element.find('#toggle_all_output').click(function () {
             that.notebook.toggle_all_output();
         });
         this.element.find('#toggle_all_output_scroll').click(function () {
             that.notebook.toggle_all_output_scroll();
         });
         this.element.find('#clear_all_output').click(function () {
             that.notebook.clear_all_output();
         });

         // Kernel
         this.element.find('#int_kernel').click(function () {
             that.notebook.kernel.interrupt();
         });
         this.element.find('#restart_kernel').click(function () {
             that.notebook.restart_kernel();
         });
         this.element.find('#reconnect_kernel').click(function () {
             that.notebook.kernel.reconnect();
         });
         // Help
         if (this.tour) {
             this.element.find('#notebook_tour').click(function () {
                 that.tour.start();
             });
         } else {
             this.element.find('#notebook_tour').addClass("disabled");
         }
         this.element.find('#keyboard_shortcuts').click(function () {
             that.quick_help.show_keyboard_shortcuts();
         });

         this.update_restore_checkpoint(null);

         this.events.on('checkpoints_listed.Notebook', function (event, data) {
             that.update_restore_checkpoint(that.notebook.checkpoints);
         });

         this.events.on('checkpoint_created.Notebook', function (event, data) {
             that.update_restore_checkpoint(that.notebook.checkpoints);
         });

         this.events.on('notebook_loaded.Notebook', function() {
             var langinfo = that.notebook.metadata.language_info || {};
             that.update_nbconvert_script(langinfo);
         });

         this.events.on('kernel_ready.Kernel', function(event, data) {
             var langinfo = data.kernel.info_reply.language_info || {};
             that.update_nbconvert_script(langinfo);
         });
     };

     MenuBar.prototype.update_restore_checkpoint = function(checkpoints) {
         var ul = this.element.find("#restore_checkpoint").find("ul");
         ul.empty();
         if (!checkpoints || checkpoints.length === 0) {
             ul.append(
                 $("<li/>")
                 .addClass("disabled")
                 .append(
                     $("<a/>")
                     .text("No checkpoints")
                 )
             );
             return;
         }

         var that = this;
         checkpoints.map(function (checkpoint) {
             var d = new Date(checkpoint.last_modified);
             ul.append(
                 $("<li/>").append(
                     $("<a/>")
                     .attr("href", "#")
                     .text(moment(d).format("LLLL"))
                     .click(function () {
                         that.notebook.restore_checkpoint_dialog(checkpoint);
                     })
                 )
             );
         });
     };

     MenuBar.prototype.update_nbconvert_script = function(langinfo) {
         // Set the 'Download as foo' menu option for the relevant language.
         var el = this.element.find('#download_script');
         var that = this;

         // Set menu entry text to e.g. "Python (.py)"
         var langname = (langinfo.name || 'Script')
         langname = langname.charAt(0).toUpperCase()+langname.substr(1) // Capitalise
-        el.find('a').text(langname + ' (
+        el.find('a').text(langname + ' ('+(langinfo.file_extension || 'txt')+')');

         // Unregister any previously registered handlers
         el.off('click');
         if (langinfo.nbconvert_exporter) {
             // Metadata specifies a specific exporter, e.g. 'python'
             el.click(function() {
                 that._nbconvert(langinfo.nbconvert_exporter, true);
             });
         } else {
             // Use generic 'script' exporter
             el.click(function() {
                 that._nbconvert('script', true);
             });
         }
     };

     // Backwards compatability.
     IPython.MenuBar = MenuBar;

     return {'MenuBar': MenuBar};
 });
@@ -1,330 +1,330 @@
 """The IPython kernel implementation"""

 import getpass
 import sys
 import traceback

 from IPython.core import release
 from IPython.html.widgets import Widget
 from IPython.utils.py3compat import builtin_mod, PY3
 from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
 from IPython.utils.traitlets import Instance, Type, Any
 from IPython.utils.decorators import undoc

 from ..comm import CommManager
 from .kernelbase import Kernel as KernelBase
 from .serialize import serialize_object, unpack_apply_message
 from .zmqshell import ZMQInteractiveShell

 class IPythonKernel(KernelBase):
     shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
     shell_class = Type(ZMQInteractiveShell)

     user_module = Any()
     def _user_module_changed(self, name, old, new):
         if self.shell is not None:
             self.shell.user_module = new

     user_ns = Instance(dict, args=None, allow_none=True)
     def _user_ns_changed(self, name, old, new):
         if self.shell is not None:
             self.shell.user_ns = new
             self.shell.init_user_ns()

     # A reference to the Python builtin 'raw_input' function.
     # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
     _sys_raw_input = Any()
     _sys_eval_input = Any()

     def __init__(self, **kwargs):
         super(IPythonKernel, self).__init__(**kwargs)

         # Initialize the InteractiveShell subclass
         self.shell = self.shell_class.instance(parent=self,
             profile_dir = self.profile_dir,
             user_module = self.user_module,
             user_ns = self.user_ns,
             kernel = self,
         )
         self.shell.displayhook.session = self.session
         self.shell.displayhook.pub_socket = self.iopub_socket
         self.shell.displayhook.topic = self._topic('execute_result')
         self.shell.display_pub.session = self.session
         self.shell.display_pub.pub_socket = self.iopub_socket
         self.shell.data_pub.session = self.session
         self.shell.data_pub.pub_socket = self.iopub_socket

         # TMP - hack while developing
         self.shell._reply_content = None

         self.comm_manager = CommManager(shell=self.shell, parent=self,
                                         kernel=self)
         self.comm_manager.register_target('ipython.widget', Widget.handle_comm_opened)

         self.shell.configurables.append(self.comm_manager)
         comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
         for msg_type in comm_msg_types:
             self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)

     # Kernel info fields
     implementation = 'ipython'
     implementation_version = release.version
     language = 'python'
     language_version = sys.version.split()[0]
     language_info = {'mimetype': 'text/x-python',
                      'codemirror_mode': {'name': 'ipython',
                                          'version': sys.version_info[0]},
                      'pygments_lexer': 'ipython%d' % (3 if PY3 else 2),
                      'nbconvert_exporter': 'python',
-                     'file_extension': 'py'
+                     'file_extension': '.py'
                      }
     @property
     def banner(self):
         return self.shell.banner

     def start(self):
         self.shell.exit_now = False
         super(IPythonKernel, self).start()

     def set_parent(self, ident, parent):
         """Overridden from parent to tell the display hook and output streams
         about the parent message.
         """
         super(IPythonKernel, self).set_parent(ident, parent)
         self.shell.set_parent(parent)

     def _forward_input(self, allow_stdin=False):
         """Forward raw_input and getpass to the current frontend.

         via input_request
         """
         self._allow_stdin = allow_stdin

         if PY3:
             self._sys_raw_input = builtin_mod.input
             builtin_mod.input = self.raw_input
         else:
             self._sys_raw_input = builtin_mod.raw_input
             self._sys_eval_input = builtin_mod.input
             builtin_mod.raw_input = self.raw_input
             builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt))
         self._save_getpass = getpass.getpass
         getpass.getpass = self.getpass

     def _restore_input(self):
         """Restore raw_input, getpass"""
         if PY3:
             builtin_mod.input = self._sys_raw_input
         else:
             builtin_mod.raw_input = self._sys_raw_input
             builtin_mod.input = self._sys_eval_input

         getpass.getpass = self._save_getpass

     @property
     def execution_count(self):
         return self.shell.execution_count

     @execution_count.setter
     def execution_count(self, value):
         # Ignore the incrememnting done by KernelBase, in favour of our shell's
         # execution counter.
         pass

     def do_execute(self, code, silent, store_history=True,
                    user_expressions=None, allow_stdin=False):
         shell = self.shell # we'll need this a lot here

         self._forward_input(allow_stdin)

         reply_content = {}
         # FIXME: the shell calls the exception handler itself.
         shell._reply_content = None
         try:
             shell.run_cell(code, store_history=store_history, silent=silent)
         except:
             status = u'error'
             # FIXME: this code right now isn't being used yet by default,
             # because the run_cell() call above directly fires off exception
             # reporting. This code, therefore, is only active in the scenario
             # where runlines itself has an unhandled exception. We need to
             # uniformize this, for all exception construction to come from a
             # single location in the codbase.
             etype, evalue, tb = sys.exc_info()
             tb_list = traceback.format_exception(etype, evalue, tb)
             reply_content.update(shell._showtraceback(etype, evalue, tb_list))
         else:
             status = u'ok'
         finally:
             self._restore_input()

         reply_content[u'status'] = status

         # Return the execution counter so clients can display prompts
         reply_content['execution_count'] = shell.execution_count - 1

         # FIXME - fish exception info out of shell, possibly left there by
         # runlines. We'll need to clean up this logic later.
         if shell._reply_content is not None:
             reply_content.update(shell._reply_content)
             e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
             reply_content['engine_info'] = e_info
             # reset after use
             shell._reply_content = None

         if 'traceback' in reply_content:
             self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))


         # At this point, we can tell whether the main code execution succeeded
         # or not. If it did, we proceed to evaluate user_expressions
         if reply_content['status'] == 'ok':
             reply_content[u'user_expressions'] = \
                 shell.user_expressions(user_expressions or {})
         else:
             # If there was an error, don't even try to compute expressions
             reply_content[u'user_expressions'] = {}

         # Payloads should be retrieved regardless of outcome, so we can both
         # recover partial output (that could have been generated early in a
         # block, before an error) and clear the payload system always.
         reply_content[u'payload'] = shell.payload_manager.read_payload()
         # Be agressive about clearing the payload because we don't want
         # it to sit in memory until the next execute_request comes in.
         shell.payload_manager.clear_payload()

         return reply_content

     def do_complete(self, code, cursor_pos):
         # FIXME: IPython completers currently assume single line,
         # but completion messages give multi-line context
         # For now, extract line from cell, based on cursor_pos:
         if cursor_pos is None:
             cursor_pos = len(code)
         line, offset = line_at_cursor(code, cursor_pos)
         line_cursor = cursor_pos - offset

         txt, matches = self.shell.complete('', line, line_cursor)
         return {'matches' : matches,
                 'cursor_end' : cursor_pos,
                 'cursor_start' : cursor_pos - len(txt),
                 'metadata' : {},
                 'status' : 'ok'}

     def do_inspect(self, code, cursor_pos, detail_level=0):
         name = token_at_cursor(code, cursor_pos)
         info = self.shell.object_inspect(name)

         reply_content = {'status' : 'ok'}
         reply_content['data'] = data = {}
         reply_content['metadata'] = {}
         reply_content['found'] = info['found']
         if info['found']:
             info_text = self.shell.object_inspect_text(
                 name,
                 detail_level=detail_level,
             )
             data['text/plain'] = info_text

         return reply_content

     def do_history(self, hist_access_type, output, raw, session=None, start=None,
                    stop=None, n=None, pattern=None, unique=False):
         if hist_access_type == 'tail':
             hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
                                                        include_latest=True)

         elif hist_access_type == 'range':
             hist = self.shell.history_manager.get_range(session, start, stop,
                                                         raw=raw, output=output)

         elif hist_access_type == 'search':
             hist = self.shell.history_manager.search(
                 pattern, raw=raw, output=output, n=n, unique=unique)
         else:
             hist = []

         return {'history' : list(hist)}

     def do_shutdown(self, restart):
         self.shell.exit_now = True
         return dict(status='ok', restart=restart)

     def do_is_complete(self, code):
         status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
         r = {'status': status}
         if status == 'incomplete':
             r['indent'] = ' ' * indent_spaces
         return r

     def do_apply(self, content, bufs, msg_id, reply_metadata):
         shell = self.shell
         try:
             working = shell.user_ns

             prefix = "_"+str(msg_id).replace("-","")+"_"
|
265 | prefix = "_"+str(msg_id).replace("-","")+"_" | |
266 |
|
266 | |||
267 | f,args,kwargs = unpack_apply_message(bufs, working, copy=False) |
|
267 | f,args,kwargs = unpack_apply_message(bufs, working, copy=False) | |
268 |
|
268 | |||
269 | fname = getattr(f, '__name__', 'f') |
|
269 | fname = getattr(f, '__name__', 'f') | |
270 |
|
270 | |||
271 | fname = prefix+"f" |
|
271 | fname = prefix+"f" | |
272 | argname = prefix+"args" |
|
272 | argname = prefix+"args" | |
273 | kwargname = prefix+"kwargs" |
|
273 | kwargname = prefix+"kwargs" | |
274 | resultname = prefix+"result" |
|
274 | resultname = prefix+"result" | |
275 |
|
275 | |||
276 | ns = { fname : f, argname : args, kwargname : kwargs , resultname : None } |
|
276 | ns = { fname : f, argname : args, kwargname : kwargs , resultname : None } | |
277 | # print ns |
|
277 | # print ns | |
278 | working.update(ns) |
|
278 | working.update(ns) | |
279 | code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname) |
|
279 | code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname) | |
280 | try: |
|
280 | try: | |
281 | exec(code, shell.user_global_ns, shell.user_ns) |
|
281 | exec(code, shell.user_global_ns, shell.user_ns) | |
282 | result = working.get(resultname) |
|
282 | result = working.get(resultname) | |
283 | finally: |
|
283 | finally: | |
284 | for key in ns: |
|
284 | for key in ns: | |
285 | working.pop(key) |
|
285 | working.pop(key) | |
286 |
|
286 | |||
287 | result_buf = serialize_object(result, |
|
287 | result_buf = serialize_object(result, | |
288 | buffer_threshold=self.session.buffer_threshold, |
|
288 | buffer_threshold=self.session.buffer_threshold, | |
289 | item_threshold=self.session.item_threshold, |
|
289 | item_threshold=self.session.item_threshold, | |
290 | ) |
|
290 | ) | |
291 |
|
291 | |||
292 | except: |
|
292 | except: | |
293 | # invoke IPython traceback formatting |
|
293 | # invoke IPython traceback formatting | |
294 | shell.showtraceback() |
|
294 | shell.showtraceback() | |
295 | # FIXME - fish exception info out of shell, possibly left there by |
|
295 | # FIXME - fish exception info out of shell, possibly left there by | |
296 | # run_code. We'll need to clean up this logic later. |
|
296 | # run_code. We'll need to clean up this logic later. | |
297 | reply_content = {} |
|
297 | reply_content = {} | |
298 | if shell._reply_content is not None: |
|
298 | if shell._reply_content is not None: | |
299 | reply_content.update(shell._reply_content) |
|
299 | reply_content.update(shell._reply_content) | |
300 | e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply') |
|
300 | e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply') | |
301 | reply_content['engine_info'] = e_info |
|
301 | reply_content['engine_info'] = e_info | |
302 | # reset after use |
|
302 | # reset after use | |
303 | shell._reply_content = None |
|
303 | shell._reply_content = None | |
304 |
|
304 | |||
305 | self.send_response(self.iopub_socket, u'error', reply_content, |
|
305 | self.send_response(self.iopub_socket, u'error', reply_content, | |
306 | ident=self._topic('error')) |
|
306 | ident=self._topic('error')) | |
307 | self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback'])) |
|
307 | self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback'])) | |
308 | result_buf = [] |
|
308 | result_buf = [] | |
309 |
|
309 | |||
310 | if reply_content['ename'] == 'UnmetDependency': |
|
310 | if reply_content['ename'] == 'UnmetDependency': | |
311 | reply_metadata['dependencies_met'] = False |
|
311 | reply_metadata['dependencies_met'] = False | |
312 | else: |
|
312 | else: | |
313 | reply_content = {'status' : 'ok'} |
|
313 | reply_content = {'status' : 'ok'} | |
314 |
|
314 | |||
315 | return reply_content, result_buf |
|
315 | return reply_content, result_buf | |
316 |
|
316 | |||
317 | def do_clear(self): |
|
317 | def do_clear(self): | |
318 | self.shell.reset(False) |
|
318 | self.shell.reset(False) | |
319 | return dict(status='ok') |
|
319 | return dict(status='ok') | |
320 |
|
320 | |||
321 |
|
321 | |||
322 | # This exists only for backwards compatibility - use IPythonKernel instead |
|
322 | # This exists only for backwards compatibility - use IPythonKernel instead | |
323 |
|
323 | |||
324 | @undoc |
|
324 | @undoc | |
325 | class Kernel(IPythonKernel): |
|
325 | class Kernel(IPythonKernel): | |
326 | def __init__(self, *args, **kwargs): |
|
326 | def __init__(self, *args, **kwargs): | |
327 | import warnings |
|
327 | import warnings | |
328 | warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel', |
|
328 | warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel', | |
329 | DeprecationWarning) |
|
329 | DeprecationWarning) | |
330 | super(Kernel, self).__init__(*args, **kwargs) |
|
330 | super(Kernel, self).__init__(*args, **kwargs) |
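
As a side note on the do_complete handler above: the FIXME records that the completer works on a single line while the completion message supplies the whole multi-line cell, so the handler converts the absolute cursor offset into a line plus a line-local cursor. The snippet below is a simplified, self-contained sketch of that arithmetic; it is not IPython's line_at_cursor and ignores boundary cases such as a cursor sitting exactly on a newline.

    def line_at_cursor(cell, cursor_pos):
        """Return (line, offset of line start) for the line containing cursor_pos."""
        offset = 0
        line = ''
        for line in cell.splitlines(True):       # keepends=True so offsets stay exact
            if offset + len(line) >= cursor_pos:
                break
            offset += len(line)
        return line.rstrip('\n'), offset

    cell = "import os\nos.pa"
    cursor_pos = len(cell)                       # cursor at the end of the cell
    line, offset = line_at_cursor(cell, cursor_pos)
    line_cursor = cursor_pos - offset            # 5, i.e. the end of "os.pa"
    assert (line, line_cursor) == ("os.pa", 5)

A completion reply built from this would then report cursor_start and cursor_end relative to the full cell, as the handler does with cursor_pos - len(txt) and cursor_pos.
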
@@ -1,259 +1,259 b'' | |||||
1 | """This module defines a base Exporter class. For Jinja template-based export, |
|
1 | """This module defines a base Exporter class. For Jinja template-based export, | |
2 | see templateexporter.py. |
|
2 | see templateexporter.py. | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 |
|
5 | |||
6 | from __future__ import print_function, absolute_import |
|
6 | from __future__ import print_function, absolute_import | |
7 |
|
7 | |||
8 | import io |
|
8 | import io | |
9 | import os |
|
9 | import os | |
10 | import copy |
|
10 | import copy | |
11 | import collections |
|
11 | import collections | |
12 | import datetime |
|
12 | import datetime | |
13 |
|
13 | |||
14 | from IPython.config.configurable import LoggingConfigurable |
|
14 | from IPython.config.configurable import LoggingConfigurable | |
15 | from IPython.config import Config |
|
15 | from IPython.config import Config | |
16 | from IPython import nbformat |
|
16 | from IPython import nbformat | |
17 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List |
|
17 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List | |
18 | from IPython.utils.importstring import import_item |
|
18 | from IPython.utils.importstring import import_item | |
19 | from IPython.utils import text, py3compat |
|
19 | from IPython.utils import text, py3compat | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | class ResourcesDict(collections.defaultdict): |
|
22 | class ResourcesDict(collections.defaultdict): | |
23 | def __missing__(self, key): |
|
23 | def __missing__(self, key): | |
24 | return '' |
|
24 | return '' | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | class Exporter(LoggingConfigurable): |
|
27 | class Exporter(LoggingConfigurable): | |
28 | """ |
|
28 | """ | |
29 | Class containing methods that sequentially run a list of preprocessors on a |
|
29 | Class containing methods that sequentially run a list of preprocessors on a | |
30 | NotebookNode object and then return the modified NotebookNode object and |
|
30 | NotebookNode object and then return the modified NotebookNode object and | |
31 | accompanying resources dict. |
|
31 | accompanying resources dict. | |
32 | """ |
|
32 | """ | |
33 |
|
33 | |||
34 | file_extension = Unicode( |
|
34 | file_extension = Unicode( | |
35 | 'txt', config=True, |
|
35 | '.txt', config=True, | |
36 | help="Extension of the file that should be written to disk" |
|
36 | help="Extension of the file that should be written to disk" | |
37 | ) |
|
37 | ) | |
38 |
|
38 | |||
39 | # MIME type of the result file, for HTTP response headers. |
|
39 | # MIME type of the result file, for HTTP response headers. | |
40 | # This is *not* a traitlet, because we want to be able to access it from |
|
40 | # This is *not* a traitlet, because we want to be able to access it from | |
41 | # the class, not just on instances. |
|
41 | # the class, not just on instances. | |
42 | output_mimetype = '' |
|
42 | output_mimetype = '' | |
43 |
|
43 | |||
44 | #Configurability, allows the user to easily add filters and preprocessors. |
|
44 | #Configurability, allows the user to easily add filters and preprocessors. | |
45 | preprocessors = List(config=True, |
|
45 | preprocessors = List(config=True, | |
46 | help="""List of preprocessors, by name or namespace, to enable.""") |
|
46 | help="""List of preprocessors, by name or namespace, to enable.""") | |
47 |
|
47 | |||
48 | _preprocessors = List() |
|
48 | _preprocessors = List() | |
49 |
|
49 | |||
50 | default_preprocessors = List(['IPython.nbconvert.preprocessors.coalesce_streams', |
|
50 | default_preprocessors = List(['IPython.nbconvert.preprocessors.coalesce_streams', | |
51 | 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', |
|
51 | 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', | |
52 | 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', |
|
52 | 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', | |
53 | 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', |
|
53 | 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', | |
54 | 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', |
|
54 | 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', | |
55 | 'IPython.nbconvert.preprocessors.LatexPreprocessor', |
|
55 | 'IPython.nbconvert.preprocessors.LatexPreprocessor', | |
56 | 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', |
|
56 | 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', | |
57 | 'IPython.nbconvert.preprocessors.ExecutePreprocessor', |
|
57 | 'IPython.nbconvert.preprocessors.ExecutePreprocessor', | |
58 | 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'], |
|
58 | 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'], | |
59 | config=True, |
|
59 | config=True, | |
60 | help="""List of preprocessors available by default, by name, namespace, |
|
60 | help="""List of preprocessors available by default, by name, namespace, | |
61 | instance, or type.""") |
|
61 | instance, or type.""") | |
62 |
|
62 | |||
63 |
|
63 | |||
64 | def __init__(self, config=None, **kw): |
|
64 | def __init__(self, config=None, **kw): | |
65 | """ |
|
65 | """ | |
66 | Public constructor |
|
66 | Public constructor | |
67 |
|
67 | |||
68 | Parameters |
|
68 | Parameters | |
69 | ---------- |
|
69 | ---------- | |
70 | config : config |
|
70 | config : config | |
71 | User configuration instance. |
|
71 | User configuration instance. | |
72 | """ |
|
72 | """ | |
73 | with_default_config = self.default_config |
|
73 | with_default_config = self.default_config | |
74 | if config: |
|
74 | if config: | |
75 | with_default_config.merge(config) |
|
75 | with_default_config.merge(config) | |
76 |
|
76 | |||
77 | super(Exporter, self).__init__(config=with_default_config, **kw) |
|
77 | super(Exporter, self).__init__(config=with_default_config, **kw) | |
78 |
|
78 | |||
79 | self._init_preprocessors() |
|
79 | self._init_preprocessors() | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | @property |
|
82 | @property | |
83 | def default_config(self): |
|
83 | def default_config(self): | |
84 | return Config() |
|
84 | return Config() | |
85 |
|
85 | |||
86 | def from_notebook_node(self, nb, resources=None, **kw): |
|
86 | def from_notebook_node(self, nb, resources=None, **kw): | |
87 | """ |
|
87 | """ | |
88 | Convert a notebook from a notebook node instance. |
|
88 | Convert a notebook from a notebook node instance. | |
89 |
|
89 | |||
90 | Parameters |
|
90 | Parameters | |
91 | ---------- |
|
91 | ---------- | |
92 | nb : :class:`~IPython.nbformat.NotebookNode` |
|
92 | nb : :class:`~IPython.nbformat.NotebookNode` | |
93 | Notebook node (dict-like with attr-access) |
|
93 | Notebook node (dict-like with attr-access) | |
94 | resources : dict |
|
94 | resources : dict | |
95 | Additional resources that can be accessed read/write by |
|
95 | Additional resources that can be accessed read/write by | |
96 | preprocessors and filters. |
|
96 | preprocessors and filters. | |
97 | **kw |
|
97 | **kw | |
98 | Ignored (?) |
|
98 | Ignored (?) | |
99 | """ |
|
99 | """ | |
100 | nb_copy = copy.deepcopy(nb) |
|
100 | nb_copy = copy.deepcopy(nb) | |
101 | resources = self._init_resources(resources) |
|
101 | resources = self._init_resources(resources) | |
102 |
|
102 | |||
103 | if 'language' in nb['metadata']: |
|
103 | if 'language' in nb['metadata']: | |
104 | resources['language'] = nb['metadata']['language'].lower() |
|
104 | resources['language'] = nb['metadata']['language'].lower() | |
105 |
|
105 | |||
106 | # Preprocess |
|
106 | # Preprocess | |
107 | nb_copy, resources = self._preprocess(nb_copy, resources) |
|
107 | nb_copy, resources = self._preprocess(nb_copy, resources) | |
108 |
|
108 | |||
109 | return nb_copy, resources |
|
109 | return nb_copy, resources | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | def from_filename(self, filename, resources=None, **kw): |
|
112 | def from_filename(self, filename, resources=None, **kw): | |
113 | """ |
|
113 | """ | |
114 | Convert a notebook from a notebook file. |
|
114 | Convert a notebook from a notebook file. | |
115 |
|
115 | |||
116 | Parameters |
|
116 | Parameters | |
117 | ---------- |
|
117 | ---------- | |
118 | filename : str |
|
118 | filename : str | |
119 | Full filename of the notebook file to open and convert. |
|
119 | Full filename of the notebook file to open and convert. | |
120 | """ |
|
120 | """ | |
121 |
|
121 | |||
122 | # Pull the metadata from the filesystem. |
|
122 | # Pull the metadata from the filesystem. | |
123 | if resources is None: |
|
123 | if resources is None: | |
124 | resources = ResourcesDict() |
|
124 | resources = ResourcesDict() | |
125 | if not 'metadata' in resources or resources['metadata'] == '': |
|
125 | if not 'metadata' in resources or resources['metadata'] == '': | |
126 | resources['metadata'] = ResourcesDict() |
|
126 | resources['metadata'] = ResourcesDict() | |
127 | basename = os.path.basename(filename) |
|
127 | basename = os.path.basename(filename) | |
128 | notebook_name = basename[:basename.rfind('.')] |
|
128 | notebook_name = basename[:basename.rfind('.')] | |
129 | resources['metadata']['name'] = notebook_name |
|
129 | resources['metadata']['name'] = notebook_name | |
130 |
|
130 | |||
131 | modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename)) |
|
131 | modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename)) | |
132 | resources['metadata']['modified_date'] = modified_date.strftime(text.date_format) |
|
132 | resources['metadata']['modified_date'] = modified_date.strftime(text.date_format) | |
133 |
|
133 | |||
134 | with io.open(filename, encoding='utf-8') as f: |
|
134 | with io.open(filename, encoding='utf-8') as f: | |
135 | return self.from_notebook_node(nbformat.read(f, as_version=4), resources=resources, **kw) |
|
135 | return self.from_notebook_node(nbformat.read(f, as_version=4), resources=resources, **kw) | |
136 |
|
136 | |||
137 |
|
137 | |||
138 | def from_file(self, file_stream, resources=None, **kw): |
|
138 | def from_file(self, file_stream, resources=None, **kw): | |
139 | """ |
|
139 | """ | |
140 | Convert a notebook from a notebook file. |
|
140 | Convert a notebook from a notebook file. | |
141 |
|
141 | |||
142 | Parameters |
|
142 | Parameters | |
143 | ---------- |
|
143 | ---------- | |
144 | file_stream : file-like object |
|
144 | file_stream : file-like object | |
145 | Notebook file-like object to convert. |
|
145 | Notebook file-like object to convert. | |
146 | """ |
|
146 | """ | |
147 | return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw) |
|
147 | return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw) | |
148 |
|
148 | |||
149 |
|
149 | |||
150 | def register_preprocessor(self, preprocessor, enabled=False): |
|
150 | def register_preprocessor(self, preprocessor, enabled=False): | |
151 | """ |
|
151 | """ | |
152 | Register a preprocessor. |
|
152 | Register a preprocessor. | |
153 | Preprocessors are classes that act upon the notebook before it is |
|
153 | Preprocessors are classes that act upon the notebook before it is | |
154 | passed into the Jinja templating engine. Preprocessors are also |
|
154 | passed into the Jinja templating engine. Preprocessors are also |
155 | capable of passing additional information to the Jinja |
|
155 | capable of passing additional information to the Jinja | |
156 | templating engine. |
|
156 | templating engine. | |
157 |
|
157 | |||
158 | Parameters |
|
158 | Parameters | |
159 | ---------- |
|
159 | ---------- | |
160 | preprocessor : preprocessor |
|
160 | preprocessor : preprocessor | |
161 | """ |
|
161 | """ | |
162 | if preprocessor is None: |
|
162 | if preprocessor is None: | |
163 | raise TypeError('preprocessor') |
|
163 | raise TypeError('preprocessor') | |
164 | isclass = isinstance(preprocessor, type) |
|
164 | isclass = isinstance(preprocessor, type) | |
165 | constructed = not isclass |
|
165 | constructed = not isclass | |
166 |
|
166 | |||
167 | # Handle preprocessor's registration based on its type |
|
167 | # Handle preprocessor's registration based on its type |
168 | if constructed and isinstance(preprocessor, py3compat.string_types): |
|
168 | if constructed and isinstance(preprocessor, py3compat.string_types): | |
169 | # Preprocessor is a string, import the namespace and recursively call |
|
169 | # Preprocessor is a string, import the namespace and recursively call | |
170 | # this register_preprocessor method |
|
170 | # this register_preprocessor method | |
171 | preprocessor_cls = import_item(preprocessor) |
|
171 | preprocessor_cls = import_item(preprocessor) | |
172 | return self.register_preprocessor(preprocessor_cls, enabled) |
|
172 | return self.register_preprocessor(preprocessor_cls, enabled) | |
173 |
|
173 | |||
174 | if constructed and hasattr(preprocessor, '__call__'): |
|
174 | if constructed and hasattr(preprocessor, '__call__'): | |
175 | # Preprocessor is a function, no need to construct it. |
|
175 | # Preprocessor is a function, no need to construct it. | |
176 | # Register and return the preprocessor. |
|
176 | # Register and return the preprocessor. | |
177 | if enabled: |
|
177 | if enabled: | |
178 | preprocessor.enabled = True |
|
178 | preprocessor.enabled = True | |
179 | self._preprocessors.append(preprocessor) |
|
179 | self._preprocessors.append(preprocessor) | |
180 | return preprocessor |
|
180 | return preprocessor | |
181 |
|
181 | |||
182 | elif isclass and isinstance(preprocessor, MetaHasTraits): |
|
182 | elif isclass and isinstance(preprocessor, MetaHasTraits): | |
183 | # Preprocessor is configurable. Make sure to pass in new default for |
|
183 | # Preprocessor is configurable. Make sure to pass in new default for | |
184 | # the enabled flag if one was specified. |
|
184 | # the enabled flag if one was specified. | |
185 | self.register_preprocessor(preprocessor(parent=self), enabled) |
|
185 | self.register_preprocessor(preprocessor(parent=self), enabled) | |
186 |
|
186 | |||
187 | elif isclass: |
|
187 | elif isclass: | |
188 | # Preprocessor is not configurable, construct it |
|
188 | # Preprocessor is not configurable, construct it | |
189 | self.register_preprocessor(preprocessor(), enabled) |
|
189 | self.register_preprocessor(preprocessor(), enabled) | |
190 |
|
190 | |||
191 | else: |
|
191 | else: | |
192 | # Preprocessor is an instance of something without a __call__ |
|
192 | # Preprocessor is an instance of something without a __call__ | |
193 | # attribute. |
|
193 | # attribute. | |
194 | raise TypeError('preprocessor') |
|
194 | raise TypeError('preprocessor') | |
195 |
|
195 | |||
196 |
|
196 | |||
197 | def _init_preprocessors(self): |
|
197 | def _init_preprocessors(self): | |
198 | """ |
|
198 | """ | |
199 | Register all of the preprocessors needed for this exporter, disabled |
|
199 | Register all of the preprocessors needed for this exporter, disabled | |
200 | unless specified explicitly. |
|
200 | unless specified explicitly. | |
201 | """ |
|
201 | """ | |
202 | self._preprocessors = [] |
|
202 | self._preprocessors = [] | |
203 |
|
203 | |||
204 | # Load default preprocessors (not necessarily enabled by default). |
|
204 | # Load default preprocessors (not necessarily enabled by default). |
205 | for preprocessor in self.default_preprocessors: |
|
205 | for preprocessor in self.default_preprocessors: | |
206 | self.register_preprocessor(preprocessor) |
|
206 | self.register_preprocessor(preprocessor) | |
207 |
|
207 | |||
208 | # Load user-specified preprocessors. Enable by default. |
|
208 | # Load user-specified preprocessors. Enable by default. | |
209 | for preprocessor in self.preprocessors: |
|
209 | for preprocessor in self.preprocessors: | |
210 | self.register_preprocessor(preprocessor, enabled=True) |
|
210 | self.register_preprocessor(preprocessor, enabled=True) | |
211 |
|
211 | |||
212 |
|
212 | |||
213 | def _init_resources(self, resources): |
|
213 | def _init_resources(self, resources): | |
214 |
|
214 | |||
215 | #Make sure the resources dict is of ResourcesDict type. |
|
215 | #Make sure the resources dict is of ResourcesDict type. | |
216 | if resources is None: |
|
216 | if resources is None: | |
217 | resources = ResourcesDict() |
|
217 | resources = ResourcesDict() | |
218 | if not isinstance(resources, ResourcesDict): |
|
218 | if not isinstance(resources, ResourcesDict): | |
219 | new_resources = ResourcesDict() |
|
219 | new_resources = ResourcesDict() | |
220 | new_resources.update(resources) |
|
220 | new_resources.update(resources) | |
221 | resources = new_resources |
|
221 | resources = new_resources | |
222 |
|
222 | |||
223 | #Make sure the metadata extension exists in resources |
|
223 | #Make sure the metadata extension exists in resources | |
224 | if 'metadata' in resources: |
|
224 | if 'metadata' in resources: | |
225 | if not isinstance(resources['metadata'], ResourcesDict): |
|
225 | if not isinstance(resources['metadata'], ResourcesDict): | |
226 | resources['metadata'] = ResourcesDict(resources['metadata']) |
|
226 | resources['metadata'] = ResourcesDict(resources['metadata']) | |
227 | else: |
|
227 | else: | |
228 | resources['metadata'] = ResourcesDict() |
|
228 | resources['metadata'] = ResourcesDict() | |
229 | if not resources['metadata']['name']: |
|
229 | if not resources['metadata']['name']: | |
230 | resources['metadata']['name'] = 'Notebook' |
|
230 | resources['metadata']['name'] = 'Notebook' | |
231 |
|
231 | |||
232 | #Set the output extension |
|
232 | #Set the output extension | |
233 | resources['output_extension'] = self.file_extension |
|
233 | resources['output_extension'] = self.file_extension | |
234 | return resources |
|
234 | return resources | |
235 |
|
235 | |||
236 |
|
236 | |||
237 | def _preprocess(self, nb, resources): |
|
237 | def _preprocess(self, nb, resources): | |
238 | """ |
|
238 | """ | |
239 | Preprocess the notebook before passing it into the Jinja engine. |
|
239 | Preprocess the notebook before passing it into the Jinja engine. | |
240 | To preprocess the notebook is to apply all of the registered preprocessors, in order, to the notebook and its resources. |
|
240 | To preprocess the notebook is to apply all of the registered preprocessors, in order, to the notebook and its resources. |
241 |
|
241 | |||
242 | Parameters |
|
242 | Parameters | |
243 | ---------- |
|
243 | ---------- | |
244 | nb : notebook node |
|
244 | nb : notebook node | |
245 | notebook that is being exported. |
|
245 | notebook that is being exported. | |
246 | resources : a dict of additional resources that |
|
246 | resources : a dict of additional resources that | |
247 | can be accessed read/write by preprocessors |
|
247 | can be accessed read/write by preprocessors | |
248 | """ |
|
248 | """ | |
249 |
|
249 | |||
250 | # Do a copy.deepcopy first, |
|
250 | # Do a copy.deepcopy first, | |
251 | # we are never safe enough with what the preprocessors could do. |
|
251 | # we are never safe enough with what the preprocessors could do. | |
252 | nbc = copy.deepcopy(nb) |
|
252 | nbc = copy.deepcopy(nb) | |
253 | resc = copy.deepcopy(resources) |
|
253 | resc = copy.deepcopy(resources) | |
254 |
|
254 | |||
255 | #Run each preprocessor on the notebook. Carry the output along |
|
255 | #Run each preprocessor on the notebook. Carry the output along | |
256 | #to each preprocessor |
|
256 | #to each preprocessor | |
257 | for preprocessor in self._preprocessors: |
|
257 | for preprocessor in self._preprocessors: | |
258 | nbc, resc = preprocessor(nbc, resc) |
|
258 | nbc, resc = preprocessor(nbc, resc) | |
259 | return nbc, resc |
|
259 | return nbc, resc |
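
To make the base Exporter API above concrete, here is a minimal usage sketch. The notebook filename and the strip_empty_cells helper are placeholders, and the import path simply mirrors the module layout of these files; a preprocessor can be any callable taking (notebook, resources) and returning the same pair.

    from IPython.nbconvert.exporters.exporter import Exporter

    def strip_empty_cells(nb, resources):
        # Illustrative preprocessor: drop cells that have no source.
        nb.cells = [cell for cell in nb.cells if cell.source.strip()]
        return nb, resources

    exporter = Exporter()
    exporter.register_preprocessor(strip_empty_cells, enabled=True)
    nb, resources = exporter.from_filename('mynotebook.ipynb')
    print(resources['metadata']['name'])     # notebook name, taken from the filename
    print(resources['output_extension'])     # '.txt' for the base class
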
@@ -1,66 +1,66 b'' | |||||
1 | """HTML Exporter class""" |
|
1 | """HTML Exporter class""" | |
2 |
|
2 | |||
3 | #----------------------------------------------------------------------------- |
|
3 | #----------------------------------------------------------------------------- | |
4 | # Copyright (c) 2013, the IPython Development Team. |
|
4 | # Copyright (c) 2013, the IPython Development Team. | |
5 | # |
|
5 | # | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 | # |
|
7 | # | |
8 | # The full license is in the file COPYING.txt, distributed with this software. |
|
8 | # The full license is in the file COPYING.txt, distributed with this software. | |
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 |
|
10 | |||
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | # Imports |
|
12 | # Imports | |
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | import os |
|
15 | import os | |
16 |
|
16 | |||
17 | from IPython.nbconvert.filters.highlight import Highlight2HTML |
|
17 | from IPython.nbconvert.filters.highlight import Highlight2HTML | |
18 | from IPython.config import Config |
|
18 | from IPython.config import Config | |
19 |
|
19 | |||
20 | from .templateexporter import TemplateExporter |
|
20 | from .templateexporter import TemplateExporter | |
21 |
|
21 | |||
22 | #----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
23 | # Classes |
|
23 | # Classes | |
24 | #----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
25 |
|
25 | |||
26 | class HTMLExporter(TemplateExporter): |
|
26 | class HTMLExporter(TemplateExporter): | |
27 | """ |
|
27 | """ | |
28 | Exports a basic HTML document. This exporter assists with the export of |
|
28 | Exports a basic HTML document. This exporter assists with the export of | |
29 | HTML. Inherit from it if you are writing your own HTML template and need |
|
29 | HTML. Inherit from it if you are writing your own HTML template and need | |
30 | custom preprocessors/filters. If you don't need custom preprocessors/ |
|
30 | custom preprocessors/filters. If you don't need custom preprocessors/ | |
31 | filters, just change the 'template_file' config option. |
|
31 | filters, just change the 'template_file' config option. | |
32 | """ |
|
32 | """ | |
33 |
|
33 | |||
34 | def _file_extension_default(self): |
|
34 | def _file_extension_default(self): | |
35 | return 'html' |
|
35 | return '.html' | |
36 |
|
36 | |||
37 | def _default_template_path_default(self): |
|
37 | def _default_template_path_default(self): | |
38 | return os.path.join("..", "templates", "html") |
|
38 | return os.path.join("..", "templates", "html") | |
39 |
|
39 | |||
40 | def _template_file_default(self): |
|
40 | def _template_file_default(self): | |
41 | return 'full' |
|
41 | return 'full' | |
42 |
|
42 | |||
43 | output_mimetype = 'text/html' |
|
43 | output_mimetype = 'text/html' | |
44 |
|
44 | |||
45 | @property |
|
45 | @property | |
46 | def default_config(self): |
|
46 | def default_config(self): | |
47 | c = Config({ |
|
47 | c = Config({ | |
48 | 'NbConvertBase': { |
|
48 | 'NbConvertBase': { | |
49 | 'display_data_priority' : ['text/javascript', 'text/html', 'application/pdf', 'image/svg+xml', 'text/latex', 'image/png', 'image/jpeg', 'text/plain'] |
|
49 | 'display_data_priority' : ['text/javascript', 'text/html', 'application/pdf', 'image/svg+xml', 'text/latex', 'image/png', 'image/jpeg', 'text/plain'] | |
50 | }, |
|
50 | }, | |
51 | 'CSSHTMLHeaderPreprocessor':{ |
|
51 | 'CSSHTMLHeaderPreprocessor':{ | |
52 | 'enabled':True |
|
52 | 'enabled':True | |
53 | }, |
|
53 | }, | |
54 | 'HighlightMagicsPreprocessor': { |
|
54 | 'HighlightMagicsPreprocessor': { | |
55 | 'enabled':True |
|
55 | 'enabled':True | |
56 | } |
|
56 | } | |
57 | }) |
|
57 | }) | |
58 | c.merge(super(HTMLExporter,self).default_config) |
|
58 | c.merge(super(HTMLExporter,self).default_config) | |
59 | return c |
|
59 | return c | |
60 |
|
60 | |||
61 | def from_notebook_node(self, nb, resources=None, **kw): |
|
61 | def from_notebook_node(self, nb, resources=None, **kw): | |
62 | langinfo = nb.metadata.get('language_info', {}) |
|
62 | langinfo = nb.metadata.get('language_info', {}) | |
63 | lexer = langinfo.get('pygments_lexer', langinfo.get('name', None)) |
|
63 | lexer = langinfo.get('pygments_lexer', langinfo.get('name', None)) | |
64 | self.register_filter('highlight_code', |
|
64 | self.register_filter('highlight_code', | |
65 | Highlight2HTML(pygments_lexer=lexer, parent=self)) |
|
65 | Highlight2HTML(pygments_lexer=lexer, parent=self)) | |
66 | return super(HTMLExporter, self).from_notebook_node(nb, resources, **kw) |
|
66 | return super(HTMLExporter, self).from_notebook_node(nb, resources, **kw) |
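
A hedged usage sketch for HTMLExporter follows; 'mynotebook.ipynb' is a placeholder and the import path mirrors the module layout shown here. The template_file default is 'full', so the result is a standalone HTML page.

    import io
    from IPython.nbconvert.exporters.html import HTMLExporter

    exporter = HTMLExporter()                      # uses the 'full' template by default
    body, resources = exporter.from_filename('mynotebook.ipynb')
    assert resources['output_extension'] == '.html'

    with io.open('mynotebook.html', 'w', encoding='utf-8') as f:
        f.write(body)
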
@@ -1,141 +1,141 b'' | |||||
1 | """Export to PDF via latex""" |
|
1 | """Export to PDF via latex""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | import subprocess |
|
6 | import subprocess | |
7 | import os |
|
7 | import os | |
8 | import sys |
|
8 | import sys | |
9 |
|
9 | |||
10 | from IPython.utils.traitlets import Integer, List, Bool, Instance |
|
10 | from IPython.utils.traitlets import Integer, List, Bool, Instance | |
11 | from IPython.utils.tempdir import TemporaryWorkingDirectory |
|
11 | from IPython.utils.tempdir import TemporaryWorkingDirectory | |
12 | from .latex import LatexExporter |
|
12 | from .latex import LatexExporter | |
13 |
|
13 | |||
14 |
|
14 | |||
15 | class PDFExporter(LatexExporter): |
|
15 | class PDFExporter(LatexExporter): | |
16 | """Writer designed to write to PDF files""" |
|
16 | """Writer designed to write to PDF files""" | |
17 |
|
17 | |||
18 | latex_count = Integer(3, config=True, |
|
18 | latex_count = Integer(3, config=True, | |
19 | help="How many times latex will be called." |
|
19 | help="How many times latex will be called." | |
20 | ) |
|
20 | ) | |
21 |
|
21 | |||
22 | latex_command = List([u"pdflatex", u"{filename}"], config=True, |
|
22 | latex_command = List([u"pdflatex", u"{filename}"], config=True, | |
23 | help="Shell command used to compile latex." |
|
23 | help="Shell command used to compile latex." | |
24 | ) |
|
24 | ) | |
25 |
|
25 | |||
26 | bib_command = List([u"bibtex", u"{filename}"], config=True, |
|
26 | bib_command = List([u"bibtex", u"{filename}"], config=True, | |
27 | help="Shell command used to run bibtex." |
|
27 | help="Shell command used to run bibtex." | |
28 | ) |
|
28 | ) | |
29 |
|
29 | |||
30 | verbose = Bool(False, config=True, |
|
30 | verbose = Bool(False, config=True, | |
31 | help="Whether to display the output of latex commands." |
|
31 | help="Whether to display the output of latex commands." | |
32 | ) |
|
32 | ) | |
33 |
|
33 | |||
34 | temp_file_exts = List(['.aux', '.bbl', '.blg', '.idx', '.log', '.out'], config=True, |
|
34 | temp_file_exts = List(['.aux', '.bbl', '.blg', '.idx', '.log', '.out'], config=True, | |
35 | help="File extensions of temp files to remove after running." |
|
35 | help="File extensions of temp files to remove after running." | |
36 | ) |
|
36 | ) | |
37 |
|
37 | |||
38 | writer = Instance("IPython.nbconvert.writers.FilesWriter", args=()) |
|
38 | writer = Instance("IPython.nbconvert.writers.FilesWriter", args=()) | |
39 |
|
39 | |||
40 | def run_command(self, command_list, filename, count, log_function): |
|
40 | def run_command(self, command_list, filename, count, log_function): | |
41 | """Run command_list count times. |
|
41 | """Run command_list count times. | |
42 |
|
42 | |||
43 | Parameters |
|
43 | Parameters | |
44 | ---------- |
|
44 | ---------- | |
45 | command_list : list |
|
45 | command_list : list | |
46 | A list of args to provide to Popen. Each element of this |
|
46 | A list of args to provide to Popen. Each element of this | |
47 | list will be interpolated with the filename to convert. |
|
47 | list will be interpolated with the filename to convert. | |
48 | filename : unicode |
|
48 | filename : unicode | |
49 | The name of the file to convert. |
|
49 | The name of the file to convert. | |
50 | count : int |
|
50 | count : int | |
51 | How many times to run the command. |
|
51 | How many times to run the command. | |
52 |
|
52 | |||
53 | Returns |
|
53 | Returns | |
54 | ------- |
|
54 | ------- | |
55 | success : bool |
|
55 | success : bool | |
56 | A boolean indicating if the command was successful (True) |
|
56 | A boolean indicating if the command was successful (True) | |
57 | or failed (False). |
|
57 | or failed (False). | |
58 | """ |
|
58 | """ | |
59 | command = [c.format(filename=filename) for c in command_list] |
|
59 | command = [c.format(filename=filename) for c in command_list] | |
60 | #In windows and python 2.x there is a bug in subprocess.Popen and |
|
60 | #In windows and python 2.x there is a bug in subprocess.Popen and | |
61 | # unicode commands are not supported |
|
61 | # unicode commands are not supported | |
62 | if sys.platform == 'win32' and sys.version_info < (3,0): |
|
62 | if sys.platform == 'win32' and sys.version_info < (3,0): | |
63 | #We must use cp1252 encoding for calling subprocess.Popen |
|
63 | #We must use cp1252 encoding for calling subprocess.Popen | |
64 | #Note that sys.stdin.encoding and encoding.DEFAULT_ENCODING |
|
64 | #Note that sys.stdin.encoding and encoding.DEFAULT_ENCODING | |
65 | # could be different (cp437 in case of dos console) |
|
65 | # could be different (cp437 in case of dos console) | |
66 | command = [c.encode('cp1252') for c in command] |
|
66 | command = [c.encode('cp1252') for c in command] | |
67 | times = 'time' if count == 1 else 'times' |
|
67 | times = 'time' if count == 1 else 'times' | |
68 | self.log.info("Running %s %i %s: %s", command_list[0], count, times, command) |
|
68 | self.log.info("Running %s %i %s: %s", command_list[0], count, times, command) | |
69 | with open(os.devnull, 'rb') as null: |
|
69 | with open(os.devnull, 'rb') as null: | |
70 | stdout = subprocess.PIPE if not self.verbose else None |
|
70 | stdout = subprocess.PIPE if not self.verbose else None | |
71 | for index in range(count): |
|
71 | for index in range(count): | |
72 | p = subprocess.Popen(command, stdout=stdout, stdin=null) |
|
72 | p = subprocess.Popen(command, stdout=stdout, stdin=null) | |
73 | out, err = p.communicate() |
|
73 | out, err = p.communicate() | |
74 | if p.returncode: |
|
74 | if p.returncode: | |
75 | if self.verbose: |
|
75 | if self.verbose: | |
76 | # verbose means I didn't capture stdout with PIPE, |
|
76 | # verbose means I didn't capture stdout with PIPE, | |
77 | # so it's already been displayed and `out` is None. |
|
77 | # so it's already been displayed and `out` is None. | |
78 | out = u'' |
|
78 | out = u'' | |
79 | else: |
|
79 | else: | |
80 | out = out.decode('utf-8', 'replace') |
|
80 | out = out.decode('utf-8', 'replace') | |
81 | log_function(command, out) |
|
81 | log_function(command, out) | |
82 | return False # failure |
|
82 | return False # failure | |
83 | return True # success |
|
83 | return True # success | |
84 |
|
84 | |||
85 | def run_latex(self, filename): |
|
85 | def run_latex(self, filename): | |
86 | """Run pdflatex self.latex_count times.""" |
|
86 | """Run pdflatex self.latex_count times.""" | |
87 |
|
87 | |||
88 | def log_error(command, out): |
|
88 | def log_error(command, out): | |
89 | self.log.critical(u"%s failed: %s\n%s", command[0], command, out) |
|
89 | self.log.critical(u"%s failed: %s\n%s", command[0], command, out) | |
90 |
|
90 | |||
91 | return self.run_command(self.latex_command, filename, |
|
91 | return self.run_command(self.latex_command, filename, | |
92 | self.latex_count, log_error) |
|
92 | self.latex_count, log_error) | |
93 |
|
93 | |||
94 | def run_bib(self, filename): |
|
94 | def run_bib(self, filename): | |
95 | """Run bibtex self.latex_count times.""" |
|
95 | """Run bibtex self.latex_count times.""" | |
96 | filename = os.path.splitext(filename)[0] |
|
96 | filename = os.path.splitext(filename)[0] | |
97 |
|
97 | |||
98 | def log_error(command, out): |
|
98 | def log_error(command, out): | |
99 | self.log.warn('%s had problems, most likely because there were no citations', |
|
99 | self.log.warn('%s had problems, most likely because there were no citations', | |
100 | command[0]) |
|
100 | command[0]) | |
101 | self.log.debug(u"%s output: %s\n%s", command[0], command, out) |
|
101 | self.log.debug(u"%s output: %s\n%s", command[0], command, out) | |
102 |
|
102 | |||
103 | return self.run_command(self.bib_command, filename, 1, log_error) |
|
103 | return self.run_command(self.bib_command, filename, 1, log_error) | |
104 |
|
104 | |||
105 | def clean_temp_files(self, filename): |
|
105 | def clean_temp_files(self, filename): | |
106 | """Remove temporary files created by pdflatex/bibtex.""" |
|
106 | """Remove temporary files created by pdflatex/bibtex.""" | |
107 | self.log.info("Removing temporary LaTeX files") |
|
107 | self.log.info("Removing temporary LaTeX files") | |
108 | filename = os.path.splitext(filename)[0] |
|
108 | filename = os.path.splitext(filename)[0] | |
109 | for ext in self.temp_file_exts: |
|
109 | for ext in self.temp_file_exts: | |
110 | try: |
|
110 | try: | |
111 | os.remove(filename+ext) |
|
111 | os.remove(filename+ext) | |
112 | except OSError: |
|
112 | except OSError: | |
113 | pass |
|
113 | pass | |
114 |
|
114 | |||
115 | def from_notebook_node(self, nb, resources=None, **kw): |
|
115 | def from_notebook_node(self, nb, resources=None, **kw): | |
116 | latex, resources = super(PDFExporter, self).from_notebook_node( |
|
116 | latex, resources = super(PDFExporter, self).from_notebook_node( | |
117 | nb, resources=resources, **kw |
|
117 | nb, resources=resources, **kw | |
118 | ) |
|
118 | ) | |
119 | with TemporaryWorkingDirectory() as td: |
|
119 | with TemporaryWorkingDirectory() as td: | |
120 | notebook_name = "notebook" |
|
120 | notebook_name = "notebook" | |
121 | tex_file = self.writer.write(latex, resources, notebook_name=notebook_name) |
|
121 | tex_file = self.writer.write(latex, resources, notebook_name=notebook_name) | |
122 | self.log.info("Building PDF") |
|
122 | self.log.info("Building PDF") | |
123 | rc = self.run_latex(tex_file) |
|
123 | rc = self.run_latex(tex_file) | |
124 | if not rc: |
|
124 | if not rc: | |
125 | rc = self.run_bib(tex_file) |
|
125 | rc = self.run_bib(tex_file) | |
126 | if not rc: |
|
126 | if not rc: | |
127 | rc = self.run_latex(tex_file) |
|
127 | rc = self.run_latex(tex_file) | |
128 |
|
128 | |||
129 | pdf_file = notebook_name + '.pdf' |
|
129 | pdf_file = notebook_name + '.pdf' | |
130 | if not os.path.isfile(pdf_file): |
|
130 | if not os.path.isfile(pdf_file): | |
131 | raise RuntimeError("PDF creating failed") |
|
131 | raise RuntimeError("PDF creating failed") | |
132 | self.log.info('PDF successfully created') |
|
132 | self.log.info('PDF successfully created') | |
133 | with open(pdf_file, 'rb') as f: |
|
133 | with open(pdf_file, 'rb') as f: | |
134 | pdf_data = f.read() |
|
134 | pdf_data = f.read() | |
135 |
|
135 | |||
136 | # convert output extension to pdf |
|
136 | # convert output extension to pdf | |
137 | # the writer above required it to be tex |
|
137 | # the writer above required it to be tex | |
138 | resources['output_extension'] = 'pdf' |
|
138 | resources['output_extension'] = '.pdf' | |
139 |
|
139 | |||
140 | return pdf_data, resources |
|
140 | return pdf_data, resources | |
141 |
|
141 |
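
As a sketch of how the PDF exporter above is typically driven (assuming pdflatex is available on the PATH; the notebook name is a placeholder and the import path mirrors this module layout):

    from IPython.nbconvert.exporters.pdf import PDFExporter

    exporter = PDFExporter()
    pdf_data, resources = exporter.from_filename('mynotebook.ipynb')   # raw PDF bytes
    assert resources['output_extension'] == '.pdf'

    with open('mynotebook.pdf', 'wb') as f:
        f.write(pdf_data)
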
@@ -1,31 +1,31 b'' | |||||
1 | """Python script Exporter class""" |
|
1 | """Python script Exporter class""" | |
2 |
|
2 | |||
3 | #----------------------------------------------------------------------------- |
|
3 | #----------------------------------------------------------------------------- | |
4 | # Copyright (c) 2013, the IPython Development Team. |
|
4 | # Copyright (c) 2013, the IPython Development Team. | |
5 | # |
|
5 | # | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 | # |
|
7 | # | |
8 | # The full license is in the file COPYING.txt, distributed with this software. |
|
8 | # The full license is in the file COPYING.txt, distributed with this software. | |
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 |
|
10 | |||
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | # Imports |
|
12 | # Imports | |
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | from .templateexporter import TemplateExporter |
|
15 | from .templateexporter import TemplateExporter | |
16 |
|
16 | |||
17 | #----------------------------------------------------------------------------- |
|
17 | #----------------------------------------------------------------------------- | |
18 | # Classes |
|
18 | # Classes | |
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 |
|
20 | |||
21 | class PythonExporter(TemplateExporter): |
|
21 | class PythonExporter(TemplateExporter): | |
22 | """ |
|
22 | """ | |
23 | Exports a Python code file. |
|
23 | Exports a Python code file. | |
24 | """ |
|
24 | """ | |
25 | def _file_extension_default(self): |
|
25 | def _file_extension_default(self): | |
26 | return 'py' |
|
26 | return '.py' | |
27 |
|
27 | |||
28 | def _template_file_default(self): |
|
28 | def _template_file_default(self): | |
29 | return 'python' |
|
29 | return 'python' | |
30 |
|
30 | |||
31 | output_mimetype = 'text/x-python' |
|
31 | output_mimetype = 'text/x-python' |
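
A minimal sketch for the Python exporter above; the filename is a placeholder and the import path mirrors this module layout.

    from IPython.nbconvert.exporters.python import PythonExporter

    source, resources = PythonExporter().from_filename('mynotebook.ipynb')
    assert resources['output_extension'] == '.py'
    # `source` is the script text rendered by the 'python' template; write it out
    # by hand or with a FilesWriter, as in the HTML example earlier.
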
@@ -1,40 +1,40 b'' | |||||
1 | """restructuredText Exporter class""" |
|
1 | """restructuredText Exporter class""" | |
2 |
|
2 | |||
3 | #----------------------------------------------------------------------------- |
|
3 | #----------------------------------------------------------------------------- | |
4 | # Copyright (c) 2013, the IPython Development Team. |
|
4 | # Copyright (c) 2013, the IPython Development Team. | |
5 | # |
|
5 | # | |
6 | # Distributed under the terms of the Modified BSD License. |
|
6 | # Distributed under the terms of the Modified BSD License. | |
7 | # |
|
7 | # | |
8 | # The full license is in the file COPYING.txt, distributed with this software. |
|
8 | # The full license is in the file COPYING.txt, distributed with this software. | |
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 |
|
10 | |||
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | # Imports |
|
12 | # Imports | |
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | from IPython.config import Config |
|
15 | from IPython.config import Config | |
16 |
|
16 | |||
17 | from .templateexporter import TemplateExporter |
|
17 | from .templateexporter import TemplateExporter | |
18 |
|
18 | |||
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 | # Classes |
|
20 | # Classes | |
21 | #----------------------------------------------------------------------------- |
|
21 | #----------------------------------------------------------------------------- | |
22 |
|
22 | |||
23 | class RSTExporter(TemplateExporter): |
|
23 | class RSTExporter(TemplateExporter): | |
24 | """ |
|
24 | """ | |
25 | Exports reStructuredText documents. |
|
25 | Exports reStructuredText documents. |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | def _file_extension_default(self): |
|
28 | def _file_extension_default(self): | |
29 | return 'rst' |
|
29 | return '.rst' | |
30 |
|
30 | |||
31 | def _template_file_default(self): |
|
31 | def _template_file_default(self): | |
32 | return 'rst' |
|
32 | return 'rst' | |
33 |
|
33 | |||
34 | output_mimetype = 'text/restructuredtext' |
|
34 | output_mimetype = 'text/restructuredtext' | |
35 |
|
35 | |||
36 | @property |
|
36 | @property | |
37 | def default_config(self): |
|
37 | def default_config(self): | |
38 | c = Config({'ExtractOutputPreprocessor':{'enabled':True}}) |
|
38 | c = Config({'ExtractOutputPreprocessor':{'enabled':True}}) | |
39 | c.merge(super(RSTExporter,self).default_config) |
|
39 | c.merge(super(RSTExporter,self).default_config) | |
40 | return c |
|
40 | return c |
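
A usage sketch for RSTExporter: because ExtractOutputPreprocessor is enabled in its default_config above, image outputs land in resources['outputs'] rather than being embedded in the text. The notebook filename is a placeholder.

    from IPython.nbconvert.exporters.rst import RSTExporter

    rst_text, resources = RSTExporter().from_filename('mynotebook.ipynb')
    for filename, data in resources.get('outputs', {}).items():
        with open(filename, 'wb') as f:        # e.g. extracted PNG outputs
            f.write(data)
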
@@ -1,14 +1,14 b'' | |||||
1 | """Generic script exporter class for any kernel language""" |
|
1 | """Generic script exporter class for any kernel language""" | |
2 |
|
2 | |||
3 | from .templateexporter import TemplateExporter |
|
3 | from .templateexporter import TemplateExporter | |
4 |
|
4 | |||
5 | class ScriptExporter(TemplateExporter): |
|
5 | class ScriptExporter(TemplateExporter): | |
6 | def _template_file_default(self): |
|
6 | def _template_file_default(self): | |
7 | return 'script' |
|
7 | return 'script' | |
8 |
|
8 | |||
9 | def from_notebook_node(self, nb, resources=None, **kw): |
|
9 | def from_notebook_node(self, nb, resources=None, **kw): | |
10 | langinfo = nb.metadata.get('language_info', {}) |
|
10 | langinfo = nb.metadata.get('language_info', {}) | |
11 | self.file_extension = langinfo.get('file_extension', 'txt') |
|
11 | self.file_extension = langinfo.get('file_extension', '.txt') | |
12 | self.output_mimetype = langinfo.get('mimetype', 'text/plain') |
|
12 | self.output_mimetype = langinfo.get('mimetype', 'text/plain') | |
13 |
|
13 | |||
14 | return super(ScriptExporter, self).from_notebook_node(nb, resources, **kw) |
|
14 | return super(ScriptExporter, self).from_notebook_node(nb, resources, **kw) |
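
A short sketch for the generic exporter above: the extension and mimetype are taken from the notebook's language_info metadata, with '.txt' and 'text/plain' as fallbacks. The filename is a placeholder.

    from IPython.nbconvert.exporters.script import ScriptExporter

    script, resources = ScriptExporter().from_filename('mynotebook.ipynb')
    print(resources['output_extension'])    # '.py' for a Python kernel, '.jl' for Julia, ...
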
@@ -1,324 +1,324 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """NbConvert is a utility for conversion of .ipynb files. |
|
2 | """NbConvert is a utility for conversion of .ipynb files. | |
3 |
|
3 | |||
4 | Command-line interface for the NbConvert conversion utility. |
|
4 | Command-line interface for the NbConvert conversion utility. | |
5 | """ |
|
5 | """ | |
6 |
|
6 | |||
7 | # Copyright (c) IPython Development Team. |
|
7 | # Copyright (c) IPython Development Team. | |
8 | # Distributed under the terms of the Modified BSD License. |
|
8 | # Distributed under the terms of the Modified BSD License. | |
9 |
|
9 | |||
10 | from __future__ import print_function |
|
10 | from __future__ import print_function | |
11 |
|
11 | |||
12 | import logging |
|
12 | import logging | |
13 | import sys |
|
13 | import sys | |
14 | import os |
|
14 | import os | |
15 | import glob |
|
15 | import glob | |
16 |
|
16 | |||
17 | from IPython.core.application import BaseIPythonApplication, base_aliases, base_flags |
|
17 | from IPython.core.application import BaseIPythonApplication, base_aliases, base_flags | |
18 | from IPython.core.profiledir import ProfileDir |
|
18 | from IPython.core.profiledir import ProfileDir | |
19 | from IPython.config import catch_config_error, Configurable |
|
19 | from IPython.config import catch_config_error, Configurable | |
20 | from IPython.utils.traitlets import ( |
|
20 | from IPython.utils.traitlets import ( | |
21 | Unicode, List, Instance, DottedObjectName, Type, CaselessStrEnum, |
|
21 | Unicode, List, Instance, DottedObjectName, Type, CaselessStrEnum, | |
22 | ) |
|
22 | ) | |
23 | from IPython.utils.importstring import import_item |
|
23 | from IPython.utils.importstring import import_item | |
24 |
|
24 | |||
25 | from .exporters.export import get_export_names, exporter_map |
|
25 | from .exporters.export import get_export_names, exporter_map | |
26 | from IPython.nbconvert import exporters, preprocessors, writers, postprocessors |
|
26 | from IPython.nbconvert import exporters, preprocessors, writers, postprocessors | |
27 | from .utils.base import NbConvertBase |
|
27 | from .utils.base import NbConvertBase | |
28 | from .utils.exceptions import ConversionException |
|
28 | from .utils.exceptions import ConversionException | |
29 |
|
29 | |||
30 | #----------------------------------------------------------------------------- |
|
30 | #----------------------------------------------------------------------------- | |
31 | #Classes and functions |
|
31 | #Classes and functions | |
32 | #----------------------------------------------------------------------------- |
|
32 | #----------------------------------------------------------------------------- | |
33 |
|
33 | |||
34 | class DottedOrNone(DottedObjectName): |
|
34 | class DottedOrNone(DottedObjectName): | |
35 | """ |
|
35 | """ | |
36 | A string holding a valid dotted object name in Python, such as A.b3._c |
|
36 | A string holding a valid dotted object name in Python, such as A.b3._c | |
37 | Also allows for None type.""" |
|
37 | Also allows for None type.""" | |
38 |
|
38 | |||
39 | default_value = u'' |
|
39 | default_value = u'' | |
40 |
|
40 | |||
41 | def validate(self, obj, value): |
|
41 | def validate(self, obj, value): | |
42 | if value is not None and len(value) > 0: |
|
42 | if value is not None and len(value) > 0: | |
43 | return super(DottedOrNone, self).validate(obj, value) |
|
43 | return super(DottedOrNone, self).validate(obj, value) | |
44 | else: |
|
44 | else: | |
45 | return value |
|
45 | return value | |
46 |
|
46 | |||
47 | nbconvert_aliases = {} |
|
47 | nbconvert_aliases = {} | |
48 | nbconvert_aliases.update(base_aliases) |
|
48 | nbconvert_aliases.update(base_aliases) | |
49 | nbconvert_aliases.update({ |
|
49 | nbconvert_aliases.update({ | |
50 | 'to' : 'NbConvertApp.export_format', |
|
50 | 'to' : 'NbConvertApp.export_format', | |
51 | 'template' : 'TemplateExporter.template_file', |
|
51 | 'template' : 'TemplateExporter.template_file', | |
52 | 'writer' : 'NbConvertApp.writer_class', |
|
52 | 'writer' : 'NbConvertApp.writer_class', | |
53 | 'post': 'NbConvertApp.postprocessor_class', |
|
53 | 'post': 'NbConvertApp.postprocessor_class', | |
54 | 'output': 'NbConvertApp.output_base', |
|
54 | 'output': 'NbConvertApp.output_base', | |
55 | 'reveal-prefix': 'RevealHelpPreprocessor.url_prefix', |
|
55 | 'reveal-prefix': 'RevealHelpPreprocessor.url_prefix', | |
56 | 'nbformat': 'NotebookExporter.nbformat_version', |
|
56 | 'nbformat': 'NotebookExporter.nbformat_version', | |
57 | }) |
|
57 | }) | |
58 |
|
58 | |||
59 | nbconvert_flags = {} |
|
59 | nbconvert_flags = {} | |
60 | nbconvert_flags.update(base_flags) |
|
60 | nbconvert_flags.update(base_flags) | |
61 | nbconvert_flags.update({ |
|
61 | nbconvert_flags.update({ | |
62 | 'execute' : ( |
|
62 | 'execute' : ( | |
63 | {'ExecutePreprocessor' : {'enabled' : True}}, |
|
63 | {'ExecutePreprocessor' : {'enabled' : True}}, | |
64 | "Execute the notebook prior to export." |
|
64 | "Execute the notebook prior to export." | |
65 | ), |
|
65 | ), | |
66 | 'stdout' : ( |
|
66 | 'stdout' : ( | |
67 | {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, |
|
67 | {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, | |
68 | "Write notebook output to stdout instead of files." |
|
68 | "Write notebook output to stdout instead of files." | |
69 | ) |
|
69 | ) | |
70 | }) |
|
70 | }) | |
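
For completeness, the aliases and flags defined above (for example the --to alias mapped to NbConvertApp.export_format) can also be exercised programmatically. This is a hedged sketch: launch_instance comes from the Application base class, and the notebook name is a placeholder. It is roughly equivalent to running "ipython nbconvert --to html mynotebook.ipynb".

    from IPython.nbconvert.nbconvertapp import NbConvertApp

    NbConvertApp.launch_instance(argv=['--to', 'html', 'mynotebook.ipynb'])
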
71 |
|
71 | |||
72 |
|
72 | |||
73 | class NbConvertApp(BaseIPythonApplication): |
|
73 | class NbConvertApp(BaseIPythonApplication): | |
74 | """Application used to convert from notebook file type (``*.ipynb``)""" |
|
74 | """Application used to convert from notebook file type (``*.ipynb``)""" | |
75 |
|
75 | |||
76 | name = 'ipython-nbconvert' |
|
76 | name = 'ipython-nbconvert' | |
77 | aliases = nbconvert_aliases |
|
77 | aliases = nbconvert_aliases | |
78 | flags = nbconvert_flags |
|
78 | flags = nbconvert_flags | |
79 |
|
79 | |||
80 | def _log_level_default(self): |
|
80 | def _log_level_default(self): | |
81 | return logging.INFO |
|
81 | return logging.INFO | |
82 |
|
82 | |||
83 | def _classes_default(self): |
|
83 | def _classes_default(self): | |
84 | classes = [NbConvertBase, ProfileDir] |
|
84 | classes = [NbConvertBase, ProfileDir] | |
85 | for pkg in (exporters, preprocessors, writers, postprocessors): |
|
85 | for pkg in (exporters, preprocessors, writers, postprocessors): | |
86 | for name in dir(pkg): |
|
86 | for name in dir(pkg): | |
87 | cls = getattr(pkg, name) |
|
87 | cls = getattr(pkg, name) | |
88 | if isinstance(cls, type) and issubclass(cls, Configurable): |
|
88 | if isinstance(cls, type) and issubclass(cls, Configurable): | |
89 | classes.append(cls) |
|
89 | classes.append(cls) | |
90 |
|
90 | |||
91 | return classes |
|
91 | return classes | |
92 |
|
92 | |||
93 | description = Unicode( |
|
93 | description = Unicode( | |
94 | u"""This application is used to convert notebook files (*.ipynb) |
|
94 | u"""This application is used to convert notebook files (*.ipynb) | |
95 | to various other formats. |
|
95 | to various other formats. | |
96 |
|
96 | |||
97 | WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") |
|
97 | WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") | |
98 |
|
98 | |||
99 | output_base = Unicode('', config=True, help='''overwrite base name used for output files. |
|
99 | output_base = Unicode('', config=True, help='''overwrite base name used for output files. | |
100 | can only be used when converting one notebook at a time. |
|
100 | can only be used when converting one notebook at a time. | |
101 | ''') |
|
101 | ''') | |
102 |
|
102 | |||
103 | examples = Unicode(u""" |
|
103 | examples = Unicode(u""" | |
104 | The simplest way to use nbconvert is |
|
104 | The simplest way to use nbconvert is | |
105 |
|
105 | |||
106 | > ipython nbconvert mynotebook.ipynb |
|
106 | > ipython nbconvert mynotebook.ipynb | |
107 |
|
107 | |||
108 | which will convert mynotebook.ipynb to the default format (probably HTML). |
|
108 | which will convert mynotebook.ipynb to the default format (probably HTML). | |
109 |
|
109 | |||
110 | You can specify the export format with `--to`. |
|
110 | You can specify the export format with `--to`. | |
111 | Options include {0} |
|
111 | Options include {0} | |
112 |
|
112 | |||
113 | > ipython nbconvert --to latex mynotebook.ipynb |
|
113 | > ipython nbconvert --to latex mynotebook.ipynb | |
114 |
|
114 | |||
115 | Both HTML and LaTeX support multiple output templates. LaTeX includes |
|
115 | Both HTML and LaTeX support multiple output templates. LaTeX includes | |
116 | 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You |
|
116 | 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You | |
117 | can specify the flavor of the format used. |
|
117 | can specify the flavor of the format used. | |
118 |
|
118 | |||
119 | > ipython nbconvert --to html --template basic mynotebook.ipynb |
|
119 | > ipython nbconvert --to html --template basic mynotebook.ipynb | |
120 |
|
120 | |||
121 | You can also pipe the output to stdout, rather than a file |
|
121 | You can also pipe the output to stdout, rather than a file | |
122 |
|
122 | |||
123 | > ipython nbconvert mynotebook.ipynb --stdout |
|
123 | > ipython nbconvert mynotebook.ipynb --stdout | |
124 |
|
124 | |||
125 | PDF is generated via latex |
|
125 | PDF is generated via latex | |
126 |
|
126 | |||
127 | > ipython nbconvert mynotebook.ipynb --to pdf |
|
127 | > ipython nbconvert mynotebook.ipynb --to pdf | |
128 |
|
128 | |||
129 | You can get (and serve) a Reveal.js-powered slideshow |
|
129 | You can get (and serve) a Reveal.js-powered slideshow | |
130 |
|
130 | |||
131 | > ipython nbconvert myslides.ipynb --to slides --post serve |
|
131 | > ipython nbconvert myslides.ipynb --to slides --post serve | |
132 |
|
132 | |||
133 | Multiple notebooks can be given at the command line in a couple of |
|
133 | Multiple notebooks can be given at the command line in a couple of | |
134 | different ways: |
|
134 | different ways: | |
135 |
|
135 | |||
136 | > ipython nbconvert notebook*.ipynb |
|
136 | > ipython nbconvert notebook*.ipynb | |
137 | > ipython nbconvert notebook1.ipynb notebook2.ipynb |
|
137 | > ipython nbconvert notebook1.ipynb notebook2.ipynb | |
138 |
|
138 | |||
139 | or you can specify the notebooks list in a config file, containing:: |
|
139 | or you can specify the notebooks list in a config file, containing:: | |
140 |
|
140 | |||
141 | c.NbConvertApp.notebooks = ["my_notebook.ipynb"] |
|
141 | c.NbConvertApp.notebooks = ["my_notebook.ipynb"] | |
142 |
|
142 | |||
143 | > ipython nbconvert --config mycfg.py |
|
143 | > ipython nbconvert --config mycfg.py | |
144 | """.format(get_export_names())) |
|
144 | """.format(get_export_names())) | |
145 |
|
145 | |||
146 | # Writer specific variables |
|
146 | # Writer specific variables | |
147 | writer = Instance('IPython.nbconvert.writers.base.WriterBase', |
|
147 | writer = Instance('IPython.nbconvert.writers.base.WriterBase', | |
148 | help="""Instance of the writer class used to write the |
|
148 | help="""Instance of the writer class used to write the | |
149 | results of the conversion.""") |
|
149 | results of the conversion.""") | |
150 | writer_class = DottedObjectName('FilesWriter', config=True, |
|
150 | writer_class = DottedObjectName('FilesWriter', config=True, | |
151 | help="""Writer class used to write the |
|
151 | help="""Writer class used to write the | |
152 | results of the conversion""") |
|
152 | results of the conversion""") | |
153 | writer_aliases = {'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter', |
|
153 | writer_aliases = {'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter', | |
154 | 'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter', |
|
154 | 'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter', | |
155 | 'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'} |
|
155 | 'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'} | |
156 | writer_factory = Type() |
|
156 | writer_factory = Type() | |
157 |
|
157 | |||
158 | def _writer_class_changed(self, name, old, new): |
|
158 | def _writer_class_changed(self, name, old, new): | |
159 | if new.lower() in self.writer_aliases: |
|
159 | if new.lower() in self.writer_aliases: | |
160 | new = self.writer_aliases[new.lower()] |
|
160 | new = self.writer_aliases[new.lower()] | |
161 | self.writer_factory = import_item(new) |
|
161 | self.writer_factory = import_item(new) | |
162 |
|
162 | |||
163 | # Post-processor specific variables |
|
163 | # Post-processor specific variables | |
164 | postprocessor = Instance('IPython.nbconvert.postprocessors.base.PostProcessorBase', |
|
164 | postprocessor = Instance('IPython.nbconvert.postprocessors.base.PostProcessorBase', | |
165 | help="""Instance of the PostProcessor class used to write the |
|
165 | help="""Instance of the PostProcessor class used to write the | |
166 | results of the conversion.""") |
|
166 | results of the conversion.""") | |
167 |
|
167 | |||
168 | postprocessor_class = DottedOrNone(config=True, |
|
168 | postprocessor_class = DottedOrNone(config=True, | |
169 | help="""PostProcessor class used to write the |
|
169 | help="""PostProcessor class used to write the | |
170 | results of the conversion""") |
|
170 | results of the conversion""") | |
171 | postprocessor_aliases = {'serve': 'IPython.nbconvert.postprocessors.serve.ServePostProcessor'} |
|
171 | postprocessor_aliases = {'serve': 'IPython.nbconvert.postprocessors.serve.ServePostProcessor'} | |
172 | postprocessor_factory = Type() |
|
172 | postprocessor_factory = Type() | |
173 |
|
173 | |||
174 | def _postprocessor_class_changed(self, name, old, new): |
|
174 | def _postprocessor_class_changed(self, name, old, new): | |
175 | if new.lower() in self.postprocessor_aliases: |
|
175 | if new.lower() in self.postprocessor_aliases: | |
176 | new = self.postprocessor_aliases[new.lower()] |
|
176 | new = self.postprocessor_aliases[new.lower()] | |
177 | if new: |
|
177 | if new: | |
178 | self.postprocessor_factory = import_item(new) |
|
178 | self.postprocessor_factory = import_item(new) | |
179 |
|
179 | |||
180 |
|
180 | |||
181 | # Other configurable variables |
|
181 | # Other configurable variables | |
182 | export_format = CaselessStrEnum(get_export_names(), |
|
182 | export_format = CaselessStrEnum(get_export_names(), | |
183 | default_value="html", |
|
183 | default_value="html", | |
184 | config=True, |
|
184 | config=True, | |
185 | help="""The export format to be used.""" |
|
185 | help="""The export format to be used.""" | |
186 | ) |
|
186 | ) | |
187 |
|
187 | |||
188 | notebooks = List([], config=True, help="""List of notebooks to convert. |
|
188 | notebooks = List([], config=True, help="""List of notebooks to convert. | |
189 | Wildcards are supported. |
|
189 | Wildcards are supported. | |
190 | Filenames passed positionally will be added to the list. |
|
190 | Filenames passed positionally will be added to the list. | |
191 | """) |
|
191 | """) | |
192 |
|
192 | |||
193 | @catch_config_error |
|
193 | @catch_config_error | |
194 | def initialize(self, argv=None): |
|
194 | def initialize(self, argv=None): | |
195 | self.init_syspath() |
|
195 | self.init_syspath() | |
196 | super(NbConvertApp, self).initialize(argv) |
|
196 | super(NbConvertApp, self).initialize(argv) | |
197 | self.init_notebooks() |
|
197 | self.init_notebooks() | |
198 | self.init_writer() |
|
198 | self.init_writer() | |
199 | self.init_postprocessor() |
|
199 | self.init_postprocessor() | |
200 |
|
200 | |||
201 |
|
201 | |||
202 |
|
202 | |||
203 | def init_syspath(self): |
|
203 | def init_syspath(self): | |
204 | """ |
|
204 | """ | |
205 | Add the cwd to the sys.path ($PYTHONPATH) |
|
205 | Add the cwd to the sys.path ($PYTHONPATH) | |
206 | """ |
|
206 | """ | |
207 | sys.path.insert(0, os.getcwd()) |
|
207 | sys.path.insert(0, os.getcwd()) | |
208 |
|
208 | |||
209 |
|
209 | |||
210 | def init_notebooks(self): |
|
210 | def init_notebooks(self): | |
211 | """Construct the list of notebooks. |
|
211 | """Construct the list of notebooks. | |
212 | If notebooks are passed on the command-line, |
|
212 | If notebooks are passed on the command-line, | |
213 | they override notebooks specified in config files. |
|
213 | they override notebooks specified in config files. | |
214 | Glob each notebook to replace notebook patterns with filenames. |
|
214 | Glob each notebook to replace notebook patterns with filenames. | |
215 | """ |
|
215 | """ | |
216 |
|
216 | |||
217 | # Specifying notebooks on the command-line overrides (rather than adds) |
|
217 | # Specifying notebooks on the command-line overrides (rather than adds) | |
218 | # the notebook list |
|
218 | # the notebook list | |
219 | if self.extra_args: |
|
219 | if self.extra_args: | |
220 | patterns = self.extra_args |
|
220 | patterns = self.extra_args | |
221 | else: |
|
221 | else: | |
222 | patterns = self.notebooks |
|
222 | patterns = self.notebooks | |
223 |
|
223 | |||
224 | # Use glob to replace all the notebook patterns with filenames. |
|
224 | # Use glob to replace all the notebook patterns with filenames. | |
225 | filenames = [] |
|
225 | filenames = [] | |
226 | for pattern in patterns: |
|
226 | for pattern in patterns: | |
227 |
|
227 | |||
228 | # Use glob to find matching filenames. Allow the user to convert |
|
228 | # Use glob to find matching filenames. Allow the user to convert | |
229 | # notebooks without having to type the extension. |
|
229 | # notebooks without having to type the extension. | |
230 | globbed_files = glob.glob(pattern) |
|
230 | globbed_files = glob.glob(pattern) | |
231 | globbed_files.extend(glob.glob(pattern + '.ipynb')) |
|
231 | globbed_files.extend(glob.glob(pattern + '.ipynb')) | |
232 | if not globbed_files: |
|
232 | if not globbed_files: | |
233 | self.log.warn("pattern %r matched no files", pattern) |
|
233 | self.log.warn("pattern %r matched no files", pattern) | |
234 |
|
234 | |||
235 | for filename in globbed_files: |
|
235 | for filename in globbed_files: | |
236 | if not filename in filenames: |
|
236 | if not filename in filenames: | |
237 | filenames.append(filename) |
|
237 | filenames.append(filename) | |
238 | self.notebooks = filenames |
|
238 | self.notebooks = filenames | |
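A small illustration (not from the diff) of the pattern expansion performed by init_notebooks above; the file names are hypothetical:

    import glob

    patterns = ['analysis', 'notebook*.ipynb']        # as passed on the command line
    filenames = []
    for pattern in patterns:
        # also try pattern + '.ipynb' so the extension can be omitted
        for filename in glob.glob(pattern) + glob.glob(pattern + '.ipynb'):
            if filename not in filenames:             # keep first occurrence only
                filenames.append(filename)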
239 |
|
239 | |||
240 | def init_writer(self): |
|
240 | def init_writer(self): | |
241 | """ |
|
241 | """ | |
242 | Initialize the writer (which is stateless) |
|
242 | Initialize the writer (which is stateless) | |
243 | """ |
|
243 | """ | |
244 | self._writer_class_changed(None, self.writer_class, self.writer_class) |
|
244 | self._writer_class_changed(None, self.writer_class, self.writer_class) | |
245 | self.writer = self.writer_factory(parent=self) |
|
245 | self.writer = self.writer_factory(parent=self) | |
246 |
|
246 | |||
247 | def init_postprocessor(self): |
|
247 | def init_postprocessor(self): | |
248 | """ |
|
248 | """ | |
249 | Initialize the postprocessor (which is stateless) |
|
249 | Initialize the postprocessor (which is stateless) | |
250 | """ |
|
250 | """ | |
251 | self._postprocessor_class_changed(None, self.postprocessor_class, |
|
251 | self._postprocessor_class_changed(None, self.postprocessor_class, | |
252 | self.postprocessor_class) |
|
252 | self.postprocessor_class) | |
253 | if self.postprocessor_factory: |
|
253 | if self.postprocessor_factory: | |
254 | self.postprocessor = self.postprocessor_factory(parent=self) |
|
254 | self.postprocessor = self.postprocessor_factory(parent=self) | |
255 |
|
255 | |||
256 | def start(self): |
|
256 | def start(self): | |
257 | """ |
|
257 | """ | |
258 | Run after initialization is completed |
|
258 | Run after initialization is completed | |
259 | """ |
|
259 | """ | |
260 | super(NbConvertApp, self).start() |
|
260 | super(NbConvertApp, self).start() | |
261 | self.convert_notebooks() |
|
261 | self.convert_notebooks() | |
262 |
|
262 | |||
263 | def convert_notebooks(self): |
|
263 | def convert_notebooks(self): | |
264 | """ |
|
264 | """ | |
265 | Convert the notebooks in the self.notebooks traitlet |
|
265 | Convert the notebooks in the self.notebooks traitlet | |
266 | """ |
|
266 | """ | |
267 | # Export each notebook |
|
267 | # Export each notebook | |
268 | conversion_success = 0 |
|
268 | conversion_success = 0 | |
269 |
|
269 | |||
270 | if self.output_base != '' and len(self.notebooks) > 1: |
|
270 | if self.output_base != '' and len(self.notebooks) > 1: | |
271 | self.log.error( |
|
271 | self.log.error( | |
272 | """UsageError: --output flag or `NbConvertApp.output_base` config option |
|
272 | """UsageError: --output flag or `NbConvertApp.output_base` config option | |
273 | cannot be used when converting multiple notebooks. |
|
273 | cannot be used when converting multiple notebooks. | |
274 | """) |
|
274 | """) | |
275 | self.exit(1) |
|
275 | self.exit(1) | |
276 |
|
276 | |||
277 | exporter = exporter_map[self.export_format](config=self.config) |
|
277 | exporter = exporter_map[self.export_format](config=self.config) | |
278 |
|
278 | |||
279 | for notebook_filename in self.notebooks: |
|
279 | for notebook_filename in self.notebooks: | |
280 | self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) |
|
280 | self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) | |
281 |
|
281 | |||
282 | # Get a unique key for the notebook and set it in the resources object. |
|
282 | # Get a unique key for the notebook and set it in the resources object. | |
283 | basename = os.path.basename(notebook_filename) |
|
283 | basename = os.path.basename(notebook_filename) | |
284 | notebook_name = basename[:basename.rfind('.')] |
|
284 | notebook_name = basename[:basename.rfind('.')] | |
285 | if self.output_base: |
|
285 | if self.output_base: | |
286 | # strip duplicate extension from output_base, to avoid basename.ext.ext |
|
286 | # strip duplicate extension from output_base, to avoid basename.ext.ext | |
287 | if getattr(exporter, 'file_extension', False): |
|
287 | if getattr(exporter, 'file_extension', False): | |
288 | base, ext = os.path.splitext(self.output_base) |
|
288 | base, ext = os.path.splitext(self.output_base) | |
289 | if ext == '.' + exporter.file_extension: |
|
289 | if ext == exporter.file_extension: |
290 | self.output_base = base |
|
290 | self.output_base = base | |
291 | notebook_name = self.output_base |
|
291 | notebook_name = self.output_base | |
292 | resources = {} |
|
292 | resources = {} | |
293 | resources['profile_dir'] = self.profile_dir.location |
|
293 | resources['profile_dir'] = self.profile_dir.location | |
294 | resources['unique_key'] = notebook_name |
|
294 | resources['unique_key'] = notebook_name | |
295 | resources['output_files_dir'] = '%s_files' % notebook_name |
|
295 | resources['output_files_dir'] = '%s_files' % notebook_name | |
296 | self.log.info("Support files will be in %s", os.path.join(resources['output_files_dir'], '')) |
|
296 | self.log.info("Support files will be in %s", os.path.join(resources['output_files_dir'], '')) | |
297 |
|
297 | |||
298 | # Try to export |
|
298 | # Try to export | |
299 | try: |
|
299 | try: | |
300 | output, resources = exporter.from_filename(notebook_filename, resources=resources) |
|
300 | output, resources = exporter.from_filename(notebook_filename, resources=resources) | |
301 | except ConversionException as e: |
|
301 | except ConversionException as e: | |
302 | self.log.error("Error while converting '%s'", notebook_filename, |
|
302 | self.log.error("Error while converting '%s'", notebook_filename, | |
303 | exc_info=True) |
|
303 | exc_info=True) | |
304 | self.exit(1) |
|
304 | self.exit(1) | |
305 | else: |
|
305 | else: | |
306 | if 'output_suffix' in resources and not self.output_base: |
|
306 | if 'output_suffix' in resources and not self.output_base: | |
307 | notebook_name += resources['output_suffix'] |
|
307 | notebook_name += resources['output_suffix'] | |
308 | write_results = self.writer.write(output, resources, notebook_name=notebook_name) |
|
308 | write_results = self.writer.write(output, resources, notebook_name=notebook_name) | |
309 |
|
309 | |||
310 | #Post-process if post processor has been defined. |
|
310 | #Post-process if post processor has been defined. | |
311 | if hasattr(self, 'postprocessor') and self.postprocessor: |
|
311 | if hasattr(self, 'postprocessor') and self.postprocessor: | |
312 | self.postprocessor(write_results) |
|
312 | self.postprocessor(write_results) | |
313 | conversion_success += 1 |
|
313 | conversion_success += 1 | |
314 |
|
314 | |||
315 | # If nothing was converted successfully, help the user. |
|
315 | # If nothing was converted successfully, help the user. | |
316 | if conversion_success == 0: |
|
316 | if conversion_success == 0: | |
317 | self.print_help() |
|
317 | self.print_help() | |
318 | sys.exit(-1) |
|
318 | sys.exit(-1) | |
319 |
|
319 | |||
320 | #----------------------------------------------------------------------------- |
|
320 | #----------------------------------------------------------------------------- | |
321 | # Main entry point |
|
321 | # Main entry point | |
322 | #----------------------------------------------------------------------------- |
|
322 | #----------------------------------------------------------------------------- | |
323 |
|
323 | |||
324 | launch_new_instance = NbConvertApp.launch_instance |
|
324 | launch_new_instance = NbConvertApp.launch_instance |
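One consequence of this changeset worth spelling out (not in the diff itself): exporter.file_extension is now expected to include the leading dot, so the --output stripping in convert_notebooks compares extensions directly. A standalone sketch of that logic, with hypothetical values:

    import os

    def strip_duplicate_extension(output_base, file_extension):
        """Mirror of the output_base handling in convert_notebooks (illustrative only)."""
        base, ext = os.path.splitext(output_base)
        if ext == file_extension:      # file_extension carries the dot, e.g. '.html'
            return base
        return output_base

    print(strip_duplicate_extension('report.html', '.html'))   # -> 'report'
    print(strip_duplicate_extension('report.v2', '.html'))     # -> 'report.v2'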
@@ -1,111 +1,111 b'' | |||||
1 | """Contains writer for writing nbconvert output to filesystem.""" |
|
1 | """Contains writer for writing nbconvert output to filesystem.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | import io |
|
6 | import io | |
7 | import os |
|
7 | import os | |
8 | import glob |
|
8 | import glob | |
9 |
|
9 | |||
10 | from IPython.utils.traitlets import Unicode |
|
10 | from IPython.utils.traitlets import Unicode | |
11 | from IPython.utils.path import link_or_copy, ensure_dir_exists |
|
11 | from IPython.utils.path import link_or_copy, ensure_dir_exists | |
12 | from IPython.utils.py3compat import unicode_type |
|
12 | from IPython.utils.py3compat import unicode_type | |
13 |
|
13 | |||
14 | from .base import WriterBase |
|
14 | from .base import WriterBase | |
15 |
|
15 | |||
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
17 | # Classes |
|
17 | # Classes | |
18 | #----------------------------------------------------------------------------- |
|
18 | #----------------------------------------------------------------------------- | |
19 |
|
19 | |||
20 | class FilesWriter(WriterBase): |
|
20 | class FilesWriter(WriterBase): | |
21 | """Consumes nbconvert output and produces files.""" |
|
21 | """Consumes nbconvert output and produces files.""" | |
22 |
|
22 | |||
23 |
|
23 | |||
24 | build_directory = Unicode("", config=True, |
|
24 | build_directory = Unicode("", config=True, | |
25 | help="""Directory to write output to. Leave blank |
|
25 | help="""Directory to write output to. Leave blank | |
26 | to output to the current directory""") |
|
26 | to output to the current directory""") | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | # Make sure that the output directory exists. |
|
29 | # Make sure that the output directory exists. | |
30 | def _build_directory_changed(self, name, old, new): |
|
30 | def _build_directory_changed(self, name, old, new): | |
31 | if new: |
|
31 | if new: | |
32 | ensure_dir_exists(new) |
|
32 | ensure_dir_exists(new) | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | def __init__(self, **kw): |
|
35 | def __init__(self, **kw): | |
36 | super(FilesWriter, self).__init__(**kw) |
|
36 | super(FilesWriter, self).__init__(**kw) | |
37 | self._build_directory_changed('build_directory', self.build_directory, |
|
37 | self._build_directory_changed('build_directory', self.build_directory, | |
38 | self.build_directory) |
|
38 | self.build_directory) | |
39 |
|
39 | |||
40 | def _makedir(self, path): |
|
40 | def _makedir(self, path): | |
41 | """Make a directory if it doesn't already exist""" |
|
41 | """Make a directory if it doesn't already exist""" | |
42 | if path: |
|
42 | if path: | |
43 | self.log.info("Making directory %s", path) |
|
43 | self.log.info("Making directory %s", path) | |
44 | ensure_dir_exists(path) |
|
44 | ensure_dir_exists(path) | |
45 |
|
45 | |||
46 | def write(self, output, resources, notebook_name=None, **kw): |
|
46 | def write(self, output, resources, notebook_name=None, **kw): | |
47 | """ |
|
47 | """ | |
48 | Consume and write Jinja output to the file system. Output directory |
|
48 | Consume and write Jinja output to the file system. Output directory | |
49 | is set via the 'build_directory' variable of this instance (a |
|
49 | is set via the 'build_directory' variable of this instance (a | |
50 | configurable). |
|
50 | configurable). | |
51 |
|
51 | |||
52 | See base for more... |
|
52 | See base for more... | |
53 | """ |
|
53 | """ | |
54 |
|
54 | |||
55 | # Verify that a notebook name is provided. |
|
55 | # Verify that a notebook name is provided. | |
56 | if notebook_name is None: |
|
56 | if notebook_name is None: | |
57 | raise TypeError('notebook_name') |
|
57 | raise TypeError('notebook_name') | |
58 |
|
58 | |||
59 | # Pull the extension and subdir from the resources dict. |
|
59 | # Pull the extension and subdir from the resources dict. | |
60 | output_extension = resources.get('output_extension', None) |
|
60 | output_extension = resources.get('output_extension', None) | |
61 |
|
61 | |||
62 | # Write all of the extracted resources to the destination directory. |
|
62 | # Write all of the extracted resources to the destination directory. | |
63 | # NOTE: WE WRITE EVERYTHING AS-IF IT'S BINARY. THE EXTRACT FIG |
|
63 | # NOTE: WE WRITE EVERYTHING AS-IF IT'S BINARY. THE EXTRACT FIG | |
64 | # PREPROCESSOR SHOULD HANDLE UNIX/WINDOWS LINE ENDINGS... |
|
64 | # PREPROCESSOR SHOULD HANDLE UNIX/WINDOWS LINE ENDINGS... | |
65 | for filename, data in resources.get('outputs', {}).items(): |
|
65 | for filename, data in resources.get('outputs', {}).items(): | |
66 |
|
66 | |||
67 | # Determine where to write the file to |
|
67 | # Determine where to write the file to | |
68 | dest = os.path.join(self.build_directory, filename) |
|
68 | dest = os.path.join(self.build_directory, filename) | |
69 | path = os.path.dirname(dest) |
|
69 | path = os.path.dirname(dest) | |
70 | self._makedir(path) |
|
70 | self._makedir(path) | |
71 |
|
71 | |||
72 | # Write file |
|
72 | # Write file | |
73 | self.log.debug("Writing %i bytes to support file %s", len(data), dest) |
|
73 | self.log.debug("Writing %i bytes to support file %s", len(data), dest) | |
74 | with io.open(dest, 'wb') as f: |
|
74 | with io.open(dest, 'wb') as f: | |
75 | f.write(data) |
|
75 | f.write(data) | |
76 |
|
76 | |||
77 | # Copy referenced files to output directory |
|
77 | # Copy referenced files to output directory | |
78 | if self.build_directory: |
|
78 | if self.build_directory: | |
79 | for filename in self.files: |
|
79 | for filename in self.files: | |
80 |
|
80 | |||
81 | # Copy files that match search pattern |
|
81 | # Copy files that match search pattern | |
82 | for matching_filename in glob.glob(filename): |
|
82 | for matching_filename in glob.glob(filename): | |
83 |
|
83 | |||
84 | # Make sure folder exists. |
|
84 | # Make sure folder exists. | |
85 | dest = os.path.join(self.build_directory, matching_filename) |
|
85 | dest = os.path.join(self.build_directory, matching_filename) | |
86 | path = os.path.dirname(dest) |
|
86 | path = os.path.dirname(dest) | |
87 | self._makedir(path) |
|
87 | self._makedir(path) | |
88 |
|
88 | |||
89 | # Copy if destination is different. |
|
89 | # Copy if destination is different. | |
90 | if not os.path.normpath(dest) == os.path.normpath(matching_filename): |
|
90 | if not os.path.normpath(dest) == os.path.normpath(matching_filename): | |
91 | self.log.info("Linking %s -> %s", matching_filename, dest) |
|
91 | self.log.info("Linking %s -> %s", matching_filename, dest) | |
92 | link_or_copy(matching_filename, dest) |
|
92 | link_or_copy(matching_filename, dest) | |
93 |
|
93 | |||
94 | # Determine where to write conversion results. |
|
94 | # Determine where to write conversion results. | |
95 | if output_extension is not None: |
|
95 | if output_extension is not None: | |
96 | dest = notebook_name + '.' + output_extension |
|
96 | dest = notebook_name + output_extension |
97 | else: |
|
97 | else: | |
98 | dest = notebook_name |
|
98 | dest = notebook_name | |
99 | if self.build_directory: |
|
99 | if self.build_directory: | |
100 | dest = os.path.join(self.build_directory, dest) |
|
100 | dest = os.path.join(self.build_directory, dest) | |
101 |
|
101 | |||
102 | # Write conversion results. |
|
102 | # Write conversion results. | |
103 | self.log.info("Writing %i bytes to %s", len(output), dest) |
|
103 | self.log.info("Writing %i bytes to %s", len(output), dest) | |
104 | if isinstance(output, unicode_type): |
|
104 | if isinstance(output, unicode_type): | |
105 | with io.open(dest, 'w', encoding='utf-8') as f: |
|
105 | with io.open(dest, 'w', encoding='utf-8') as f: | |
106 | f.write(output) |
|
106 | f.write(output) | |
107 | else: |
|
107 | else: | |
108 | with io.open(dest, 'wb') as f: |
|
108 | with io.open(dest, 'wb') as f: | |
109 | f.write(output) |
|
109 | f.write(output) | |
110 |
|
110 | |||
111 | return dest |
|
111 | return dest |
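A brief usage sketch (not part of the changeset) of FilesWriter with the dotted output_extension convention introduced by this change; paths and contents are hypothetical:

    import os
    from IPython.nbconvert.writers.files import FilesWriter

    resources = {
        'output_extension': '.html',   # note the leading dot after this change
        'outputs': {os.path.join('report_files', 'fig.png'): b'<png bytes>'},
    }

    writer = FilesWriter(build_directory='build')      # support files land under ./build
    dest = writer.write(u'<html>...</html>', resources, notebook_name='report')
    # dest would be os.path.join('build', 'report.html'); the extracted figure is
    # written to build/report_files/fig.png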
@@ -1,203 +1,203 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Module with tests for files |
|
2 | Module with tests for files | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | #----------------------------------------------------------------------------- |
|
5 | #----------------------------------------------------------------------------- | |
6 | # Copyright (c) 2013, the IPython Development Team. |
|
6 | # Copyright (c) 2013, the IPython Development Team. | |
7 | # |
|
7 | # | |
8 | # Distributed under the terms of the Modified BSD License. |
|
8 | # Distributed under the terms of the Modified BSD License. | |
9 | # |
|
9 | # | |
10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
10 | # The full license is in the file COPYING.txt, distributed with this software. | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | import sys |
|
17 | import sys | |
18 | import os |
|
18 | import os | |
19 |
|
19 | |||
20 | from ...tests.base import TestsBase |
|
20 | from ...tests.base import TestsBase | |
21 | from ..files import FilesWriter |
|
21 | from ..files import FilesWriter | |
22 | from IPython.utils.py3compat import PY3 |
|
22 | from IPython.utils.py3compat import PY3 | |
23 |
|
23 | |||
24 | if PY3: |
|
24 | if PY3: | |
25 | from io import StringIO |
|
25 | from io import StringIO | |
26 | else: |
|
26 | else: | |
27 | from StringIO import StringIO |
|
27 | from StringIO import StringIO | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | #----------------------------------------------------------------------------- |
|
30 | #----------------------------------------------------------------------------- | |
31 | # Class |
|
31 | # Class | |
32 | #----------------------------------------------------------------------------- |
|
32 | #----------------------------------------------------------------------------- | |
33 |
|
33 | |||
34 | class Testfiles(TestsBase): |
|
34 | class Testfiles(TestsBase): | |
35 | """Contains test functions for files.py""" |
|
35 | """Contains test functions for files.py""" | |
36 |
|
36 | |||
37 | def test_basic_output(self): |
|
37 | def test_basic_output(self): | |
38 | """Is FilesWriter basic output correct?""" |
|
38 | """Is FilesWriter basic output correct?""" | |
39 |
|
39 | |||
40 | # Work in a temporary directory. |
|
40 | # Work in a temporary directory. | |
41 | with self.create_temp_cwd(): |
|
41 | with self.create_temp_cwd(): | |
42 |
|
42 | |||
43 | # Create the resources dictionary |
|
43 | # Create the resources dictionary | |
44 | res = {} |
|
44 | res = {} | |
45 |
|
45 | |||
46 | # Create files writer, test output |
|
46 | # Create files writer, test output | |
47 | writer = FilesWriter() |
|
47 | writer = FilesWriter() | |
48 | writer.write(u'y', res, notebook_name="z") |
|
48 | writer.write(u'y', res, notebook_name="z") | |
49 |
|
49 | |||
50 | # Check the output of the file |
|
50 | # Check the output of the file | |
51 | with open('z', 'r') as f: |
|
51 | with open('z', 'r') as f: | |
52 | output = f.read() |
|
52 | output = f.read() | |
53 | self.assertEqual(output, u'y') |
|
53 | self.assertEqual(output, u'y') | |
54 |
|
54 | |||
55 | def test_ext(self): |
|
55 | def test_ext(self): | |
56 | """Does the FilesWriter add the correct extension to the output?""" |
|
56 | """Does the FilesWriter add the correct extension to the output?""" | |
57 |
|
57 | |||
58 | # Work in a temporary directory. |
|
58 | # Work in a temporary directory. | |
59 | with self.create_temp_cwd(): |
|
59 | with self.create_temp_cwd(): | |
60 |
|
60 | |||
61 | # Create the resources dictionary |
|
61 | # Create the resources dictionary | |
62 | res = {'output_extension': 'txt'} |
|
62 | res = {'output_extension': '.txt'} | |
63 |
|
63 | |||
64 | # Create files writer, test output |
|
64 | # Create files writer, test output | |
65 | writer = FilesWriter() |
|
65 | writer = FilesWriter() | |
66 | writer.write(u'y', res, notebook_name="z") |
|
66 | writer.write(u'y', res, notebook_name="z") | |
67 |
|
67 | |||
68 | # Check the output of the file |
|
68 | # Check the output of the file | |
69 | assert os.path.isfile('z.txt') |
|
69 | assert os.path.isfile('z.txt') | |
70 | with open('z.txt', 'r') as f: |
|
70 | with open('z.txt', 'r') as f: | |
71 | output = f.read() |
|
71 | output = f.read() | |
72 | self.assertEqual(output, u'y') |
|
72 | self.assertEqual(output, u'y') | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def test_extract(self): |
|
75 | def test_extract(self): | |
76 | """Can FilesWriter write extracted figures correctly?""" |
|
76 | """Can FilesWriter write extracted figures correctly?""" | |
77 |
|
77 | |||
78 | # Work in a temporary directory. |
|
78 | # Work in a temporary directory. | |
79 | with self.create_temp_cwd(): |
|
79 | with self.create_temp_cwd(): | |
80 |
|
80 | |||
81 | # Create the resources dictionary |
|
81 | # Create the resources dictionary | |
82 | res = {'outputs': {os.path.join('z_files', 'a'): b'b'}} |
|
82 | res = {'outputs': {os.path.join('z_files', 'a'): b'b'}} | |
83 |
|
83 | |||
84 | # Create files writer, test output |
|
84 | # Create files writer, test output | |
85 | writer = FilesWriter() |
|
85 | writer = FilesWriter() | |
86 | writer.write(u'y', res, notebook_name="z") |
|
86 | writer.write(u'y', res, notebook_name="z") | |
87 |
|
87 | |||
88 | # Check the output of the file |
|
88 | # Check the output of the file | |
89 | with open('z', 'r') as f: |
|
89 | with open('z', 'r') as f: | |
90 | output = f.read() |
|
90 | output = f.read() | |
91 | self.assertEqual(output, u'y') |
|
91 | self.assertEqual(output, u'y') | |
92 |
|
92 | |||
93 | # Check the output of the extracted file |
|
93 | # Check the output of the extracted file | |
94 | extracted_file_dest = os.path.join('z_files', 'a') |
|
94 | extracted_file_dest = os.path.join('z_files', 'a') | |
95 | assert os.path.isfile(extracted_file_dest) |
|
95 | assert os.path.isfile(extracted_file_dest) | |
96 | with open(extracted_file_dest, 'r') as f: |
|
96 | with open(extracted_file_dest, 'r') as f: | |
97 | output = f.read() |
|
97 | output = f.read() | |
98 | self.assertEqual(output, 'b') |
|
98 | self.assertEqual(output, 'b') | |
99 |
|
99 | |||
100 |
|
100 | |||
101 | def test_builddir(self): |
|
101 | def test_builddir(self): | |
102 | """Can FilesWriter write to a build dir correctly?""" |
|
102 | """Can FilesWriter write to a build dir correctly?""" | |
103 |
|
103 | |||
104 | # Work in a temporary directory. |
|
104 | # Work in a temporary directory. | |
105 | with self.create_temp_cwd(): |
|
105 | with self.create_temp_cwd(): | |
106 |
|
106 | |||
107 | # Create the resources dictionary |
|
107 | # Create the resources dictionary | |
108 | res = {'outputs': {os.path.join('z_files', 'a'): b'b'}} |
|
108 | res = {'outputs': {os.path.join('z_files', 'a'): b'b'}} | |
109 |
|
109 | |||
110 | # Create files writer, test output |
|
110 | # Create files writer, test output | |
111 | writer = FilesWriter() |
|
111 | writer = FilesWriter() | |
112 | writer.build_directory = u'build' |
|
112 | writer.build_directory = u'build' | |
113 | writer.write(u'y', res, notebook_name="z") |
|
113 | writer.write(u'y', res, notebook_name="z") | |
114 |
|
114 | |||
115 | # Check the output of the file |
|
115 | # Check the output of the file | |
116 | assert os.path.isdir(writer.build_directory) |
|
116 | assert os.path.isdir(writer.build_directory) | |
117 | dest = os.path.join(writer.build_directory, 'z') |
|
117 | dest = os.path.join(writer.build_directory, 'z') | |
118 | with open(dest, 'r') as f: |
|
118 | with open(dest, 'r') as f: | |
119 | output = f.read() |
|
119 | output = f.read() | |
120 | self.assertEqual(output, u'y') |
|
120 | self.assertEqual(output, u'y') | |
121 |
|
121 | |||
122 | # Check the output of the extracted file |
|
122 | # Check the output of the extracted file | |
123 | extracted_file_dest = os.path.join(writer.build_directory, 'z_files', 'a') |
|
123 | extracted_file_dest = os.path.join(writer.build_directory, 'z_files', 'a') | |
124 | assert os.path.isfile(extracted_file_dest) |
|
124 | assert os.path.isfile(extracted_file_dest) | |
125 | with open(extracted_file_dest, 'r') as f: |
|
125 | with open(extracted_file_dest, 'r') as f: | |
126 | output = f.read() |
|
126 | output = f.read() | |
127 | self.assertEqual(output, 'b') |
|
127 | self.assertEqual(output, 'b') | |
128 |
|
128 | |||
129 |
|
129 | |||
130 | def test_links(self): |
|
130 | def test_links(self): | |
131 | """Can the FilesWriter handle linked files correctly?""" |
|
131 | """Can the FilesWriter handle linked files correctly?""" | |
132 |
|
132 | |||
133 | # Work in a temporary directory. |
|
133 | # Work in a temporary directory. | |
134 | with self.create_temp_cwd(): |
|
134 | with self.create_temp_cwd(): | |
135 |
|
135 | |||
136 | # Create test file |
|
136 | # Create test file | |
137 | os.mkdir('sub') |
|
137 | os.mkdir('sub') | |
138 | with open(os.path.join('sub', 'c'), 'w') as f: |
|
138 | with open(os.path.join('sub', 'c'), 'w') as f: | |
139 | f.write('d') |
|
139 | f.write('d') | |
140 |
|
140 | |||
141 | # Create the resources dictionary |
|
141 | # Create the resources dictionary | |
142 | res = {} |
|
142 | res = {} | |
143 |
|
143 | |||
144 | # Create files writer, test output |
|
144 | # Create files writer, test output | |
145 | writer = FilesWriter() |
|
145 | writer = FilesWriter() | |
146 | writer.files = [os.path.join('sub', 'c')] |
|
146 | writer.files = [os.path.join('sub', 'c')] | |
147 | writer.build_directory = u'build' |
|
147 | writer.build_directory = u'build' | |
148 | writer.write(u'y', res, notebook_name="z") |
|
148 | writer.write(u'y', res, notebook_name="z") | |
149 |
|
149 | |||
150 | # Check the output of the file |
|
150 | # Check the output of the file | |
151 | assert os.path.isdir(writer.build_directory) |
|
151 | assert os.path.isdir(writer.build_directory) | |
152 | dest = os.path.join(writer.build_directory, 'z') |
|
152 | dest = os.path.join(writer.build_directory, 'z') | |
153 | with open(dest, 'r') as f: |
|
153 | with open(dest, 'r') as f: | |
154 | output = f.read() |
|
154 | output = f.read() | |
155 | self.assertEqual(output, u'y') |
|
155 | self.assertEqual(output, u'y') | |
156 |
|
156 | |||
157 | # Check to make sure the linked file was copied |
|
157 | # Check to make sure the linked file was copied | |
158 | path = os.path.join(writer.build_directory, 'sub') |
|
158 | path = os.path.join(writer.build_directory, 'sub') | |
159 | assert os.path.isdir(path) |
|
159 | assert os.path.isdir(path) | |
160 | dest = os.path.join(path, 'c') |
|
160 | dest = os.path.join(path, 'c') | |
161 | assert os.path.isfile(dest) |
|
161 | assert os.path.isfile(dest) | |
162 | with open(dest, 'r') as f: |
|
162 | with open(dest, 'r') as f: | |
163 | output = f.read() |
|
163 | output = f.read() | |
164 | self.assertEqual(output, 'd') |
|
164 | self.assertEqual(output, 'd') | |
165 |
|
165 | |||
166 | def test_glob(self): |
|
166 | def test_glob(self): | |
167 | """Can the FilesWriter handle globbed files correctly?""" |
|
167 | """Can the FilesWriter handle globbed files correctly?""" | |
168 |
|
168 | |||
169 | # Work in a temporary directory. |
|
169 | # Work in a temporary directory. | |
170 | with self.create_temp_cwd(): |
|
170 | with self.create_temp_cwd(): | |
171 |
|
171 | |||
172 | # Create test files |
|
172 | # Create test files | |
173 | os.mkdir('sub') |
|
173 | os.mkdir('sub') | |
174 | with open(os.path.join('sub', 'c'), 'w') as f: |
|
174 | with open(os.path.join('sub', 'c'), 'w') as f: | |
175 | f.write('e') |
|
175 | f.write('e') | |
176 | with open(os.path.join('sub', 'd'), 'w') as f: |
|
176 | with open(os.path.join('sub', 'd'), 'w') as f: | |
177 | f.write('e') |
|
177 | f.write('e') | |
178 |
|
178 | |||
179 | # Create the resources dictionary |
|
179 | # Create the resources dictionary | |
180 | res = {} |
|
180 | res = {} | |
181 |
|
181 | |||
182 | # Create files writer, test output |
|
182 | # Create files writer, test output | |
183 | writer = FilesWriter() |
|
183 | writer = FilesWriter() | |
184 | writer.files = ['sub/*'] |
|
184 | writer.files = ['sub/*'] | |
185 | writer.build_directory = u'build' |
|
185 | writer.build_directory = u'build' | |
186 | writer.write(u'y', res, notebook_name="z") |
|
186 | writer.write(u'y', res, notebook_name="z") | |
187 |
|
187 | |||
188 | # Check the output of the file |
|
188 | # Check the output of the file | |
189 | assert os.path.isdir(writer.build_directory) |
|
189 | assert os.path.isdir(writer.build_directory) | |
190 | dest = os.path.join(writer.build_directory, 'z') |
|
190 | dest = os.path.join(writer.build_directory, 'z') | |
191 | with open(dest, 'r') as f: |
|
191 | with open(dest, 'r') as f: | |
192 | output = f.read() |
|
192 | output = f.read() | |
193 | self.assertEqual(output, u'y') |
|
193 | self.assertEqual(output, u'y') | |
194 |
|
194 | |||
195 | # Check to make sure the globbed files were copied |
|
195 | # Check to make sure the globbed files were copied | |
196 | path = os.path.join(writer.build_directory, 'sub') |
|
196 | path = os.path.join(writer.build_directory, 'sub') | |
197 | assert os.path.isdir(path) |
|
197 | assert os.path.isdir(path) | |
198 | for filename in ['c', 'd']: |
|
198 | for filename in ['c', 'd']: | |
199 | dest = os.path.join(path, filename) |
|
199 | dest = os.path.join(path, filename) | |
200 | assert os.path.isfile(dest) |
|
200 | assert os.path.isfile(dest) | |
201 | with open(dest, 'r') as f: |
|
201 | with open(dest, 'r') as f: | |
202 | output = f.read() |
|
202 | output = f.read() | |
203 | self.assertEqual(output, 'e') |
|
203 | self.assertEqual(output, 'e') |