@@ -1,38 +1,38 @@
"""The basic dict based notebook format.

The Python representation of a notebook is a nested structure of
dictionary subclasses that support attribute access
(IPython.utils.ipstruct.Struct). The functions in this module are merely
helpers to build the structs in the right form.
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import pprint
import uuid

from IPython.utils.ipstruct import Struct
from IPython.utils.py3compat import cast_unicode, unicode_type


# Change this when incrementing the nbformat version
nbformat = 4
nbformat_minor = 0
-nbformat_schema = '
+nbformat_schema = 'nbformat.v4.schema.json'

class NotebookNode(Struct):
    pass


def from_dict(d):
    if isinstance(d, dict):
        newd = NotebookNode()
        for k,v in d.items():
            newd[k] = from_dict(v)
        return newd
    elif isinstance(d, (tuple, list)):
        return [from_dict(i) for i in d]
    else:
        return d
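
Note (not part of the diff): from_dict recursively wraps plain dicts in NotebookNode, an IPython.utils.ipstruct.Struct subclass, so nested values can be read either by key or by attribute. A minimal usage sketch follows; the import path is an assumption based on this branch's layout (IPython/nbformat/v4/nbbase.py).

    # Minimal sketch only; import path assumed from this branch's layout.
    from IPython.nbformat.v4.nbbase import from_dict

    nb = from_dict({
        "nbformat": 4,
        "nbformat_minor": 0,
        "metadata": {"kernel_info": {"name": "python", "language": "python"}},
        "cells": [{"cell_type": "markdown", "metadata": {}, "source": "# Hello"}],
    })

    print(nb.metadata.kernel_info.language)  # attribute access through Struct -> "python"
    print(nb["cells"][0]["cell_type"])       # ordinary dict access still works
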
@@ -1,347 +1,346 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "IPython Notebook v4.0 JSON schema.",
    "type": "object",
    "additionalProperties": false,
    "required": ["metadata", "nbformat_minor", "nbformat", "cells"],
    "properties": {
        "metadata": {
            "description": "Notebook root-level metadata.",
            "type": "object",
            "additionalProperties": true,
            "properties": {
                "kernel_info": {
                    "description": "Kernel information.",
                    "type": "object",
                    "required": ["name", "language"],
                    "properties": {
                        "name": {
                            "description": "Name of the kernel specification.",
                            "type": "string"
                        },
                        "language": {
                            "description": "The programming language which this kernel runs.",
                            "type": "string"
                        },
                        "codemirror_mode": {
                            "description": "The codemirror mode to use for code in this language.",
                            "type": "string"
                        }
                    }
                },
                "signature": {
                    "description": "Hash of the notebook.",
                    "type": "string"
                },
                "orig_nbformat": {
                    "description": "Original notebook format (major number) before converting the notebook between versions.",
                    "type": "integer",
                    "minimum": 1
                }
            }
        },
        "nbformat_minor": {
            "description": "Notebook format (minor number). Incremented for backward compatible changes to the notebook format.",
            "type": "integer",
            "minimum": 0
        },
        "nbformat": {
            "description": "Notebook format (major number). Incremented between backwards incompatible changes to the notebook format.",
            "type": "integer",
            "minimum": 4,
            "maximum": 4
        },
        "cells": {
            "description": "Array of cells of the current notebook.",
            "type": "array",
            "items": {
                "type": "object",
                "oneOf": [
                    {"$ref": "#/definitions/raw_cell"},
                    {"$ref": "#/definitions/markdown_cell"},
                    {"$ref": "#/definitions/heading_cell"},
                    {"$ref": "#/definitions/code_cell"}
                ]
            }
        }
    },

    "definitions": {

        "raw_cell": {
            "description": "Notebook raw nbconvert cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["cell_type", "metadata", "source"],
            "properties": {
                "cell_type": {
                    "description": "String identifying the type of cell.",
                    "enum": ["raw"]
                },
                "metadata": {
                    "description": "Cell-level metadata.",
                    "type": "object",
                    "additionalProperties": true,
                    "properties": {
                        "format": {
                            "description": "Raw cell metadata format for nbconvert.",
                            "type": "string"
                        },
                        "name": {"$ref": "#/definitions/misc/metadata_name"},
                        "tags": {"$ref": "#/definitions/misc/metadata_tags"}
                    }
                },
                "source": {"$ref": "#/definitions/misc/source"}
            }
        },

        "markdown_cell": {
            "description": "Notebook markdown cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["cell_type", "metadata", "source"],
            "properties": {
                "cell_type": {
                    "description": "String identifying the type of cell.",
                    "enum": ["markdown"]
                },
                "metadata": {
                    "description": "Cell-level metadata.",
                    "type": "object",
                    "properties": {
                        "name": {"$ref": "#/definitions/misc/metadata_name"},
                        "tags": {"$ref": "#/definitions/misc/metadata_tags"}
                    },
                    "additionalProperties": true
                },
                "source": {"$ref": "#/definitions/misc/source"}
            }
        },

        "heading_cell": {
            "description": "Notebook heading cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["cell_type", "metadata", "source", "level"],
            "properties": {
                "cell_type": {
                    "description": "String identifying the type of cell.",
                    "enum": ["heading"]
                },
                "metadata": {
                    "description": "Cell-level metadata.",
                    "type": "object",
                    "properties": {
                        "name": {"$ref": "#/definitions/misc/metadata_name"},
                        "tags": {"$ref": "#/definitions/misc/metadata_tags"}
                    },
                    "additionalProperties": true
                },
                "source": {"$ref": "#/definitions/misc/source"},
                "level": {
                    "description": "Level of heading cells.",
                    "type": "integer",
                    "minimum": 1
-                    "maximum": 6
                }
            }
        },

        "code_cell": {
            "description": "Notebook code cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["cell_type", "metadata", "source", "outputs", "prompt_number"],
            "properties": {
                "cell_type": {
                    "description": "String identifying the type of cell.",
                    "enum": ["code"]
                },
                "metadata": {
                    "description": "Cell-level metadata.",
                    "type": "object",
                    "additionalProperties": true,
                    "properties": {
                        "collapsed": {
                            "description": "Whether the cell is collapsed/expanded.",
                            "type": "boolean"
                        },
                        "autoscroll": {
                            "description": "Whether the cell's output is scrolled, unscrolled, or autoscrolled.",
                            "enum": [true, false, "auto"]
                        },
                        "name": {"$ref": "#/definitions/misc/metadata_name"},
                        "tags": {"$ref": "#/definitions/misc/metadata_tags"}
                    }
                },
                "source": {"$ref": "#/definitions/misc/source"},
                "outputs": {
                    "description": "Execution, display, or stream outputs.",
                    "type": "array",
                    "items": {"$ref": "#/definitions/output"}
                },
                "prompt_number": {
                    "description": "The code cell's prompt number. Will be null if the cell has not been run.",
                    "type": ["integer", "null"],
                    "minimum": 0
                }
            }
        },
        "output": {
            "type": "object",
            "oneOf": [
                {"$ref": "#/definitions/execute_result"},
                {"$ref": "#/definitions/display_data"},
                {"$ref": "#/definitions/stream"},
                {"$ref": "#/definitions/error"}
            ]
        },
        "execute_result": {
            "description": "Result of executing a code cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["output_type", "metadata", "prompt_number"],
            "properties": {
                "output_type": {
                    "description": "Type of cell output.",
                    "enum": ["execute_result"]
                },
                "prompt_number": {
                    "description": "A result's prompt number.",
                    "type": ["integer"],
                    "minimum": 0
                },
                "application/json": {
                    "type": "object"
                },
                "metadata": {"$ref": "#/definitions/misc/output_metadata"}
            },
            "patternProperties": {
                "^(?!application/json$)[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
                    "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
                    "$ref": "#/definitions/misc/multiline_string"
                }
            }
        },

        "display_data": {
            "description": "Data displayed as a result of code cell execution.",
            "type": "object",
            "additionalProperties": false,
            "required": ["output_type", "metadata"],
            "properties": {
                "output_type": {
                    "description": "Type of cell output.",
                    "enum": ["display_data"]
                },
                "application/json": {
                    "type": "object"
                },
                "metadata": {"$ref": "#/definitions/misc/output_metadata"}
            },
            "patternProperties": {
                "^(?!application/json$)[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
                    "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
                    "$ref": "#/definitions/misc/multiline_string"
                }
            }
        },

        "stream": {
            "description": "Stream output from a code cell.",
            "type": "object",
            "additionalProperties": false,
            "required": ["output_type", "metadata", "stream", "text"],
            "properties": {
                "output_type": {
                    "description": "Type of cell output.",
                    "enum": ["stream"]
                },
                "metadata": {"$ref": "#/definitions/misc/output_metadata"},
                "stream": {
                    "description": "The stream type/destination.",
                    "type": "string"
                },
                "text": {
                    "description": "The stream's text output, represented as an array of strings.",
                    "$ref": "#/definitions/misc/multiline_string"
                }
            }
        },

        "error": {
            "description": "Output of an error that occurred during code cell execution.",
            "type": "object",
            "additionalProperties": false,
            "required": ["output_type", "metadata", "ename", "evalue", "traceback"],
            "properties": {
                "output_type": {
                    "description": "Type of cell output.",
                    "enum": ["error"]
                },
                "metadata": {"$ref": "#/definitions/misc/output_metadata"},
                "ename": {
                    "description": "The name of the error.",
                    "type": "string"
                },
                "evalue": {
                    "description": "The value, or message, of the error.",
                    "type": "string"
                },
                "traceback": {
                    "description": "The error's traceback, represented as an array of strings.",
                    "type": "array",
                    "items": {"type": "string"}
                }
            }
        },

        "misc": {
            "metadata_name": {
                "description": "The cell's name. If present, must be a non-empty string.",
                "type": "string",
                "pattern": "^.+$"
            },
            "metadata_tags": {
                "description": "The cell's tags. Tags must be unique, and must not contain commas.",
                "type": "array",
                "uniqueItems": true,
                "items": {
                    "type": "string",
                    "pattern": "^[^,]+$"
                }
            },
            "source": {
                "description": "Contents of the cell, represented as an array of lines.",
                "$ref": "#/definitions/misc/multiline_string"
            },
            "prompt_number": {
                "description": "The code cell's prompt number. Will be null if the cell has not been run.",
                "type": ["integer", "null"],
                "minimum": 0
            },
            "mimetype": {
                "patternProperties": {
                    "^[a-zA-Z0-9\\-\\+]+/[a-zA-Z0-9\\-\\+]+": {
                        "description": "The cell's mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
                        "$ref": "#/definitions/misc/multiline_string"
                    }
                }
            },
            "output_metadata": {
                "description": "Cell output metadata.",
                "type": "object",
                "additionalProperties": true
            },
            "multiline_string": {
                "oneOf" : [
                    {"type": "string"},
                    {
                        "type": "array",
                        "items": {"type": "string"}
                    }
                ]
            }
        }
    }
}
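
Note (not part of the diff): the schema above is plain JSON Schema draft-04, so a notebook dict can be checked against it directly with the jsonschema package, which is roughly what the nbformat validator module wraps. A sketch under assumptions: jsonschema is installed and the schema file sits in the working directory.

    # Sketch only: validate a v4 notebook dict straight against the schema file.
    import json
    import jsonschema

    with open("nbformat.v4.schema.json") as f:  # path assumed
        schema = json.load(f)

    nb = {
        "nbformat": 4,
        "nbformat_minor": 0,
        "metadata": {},
        "cells": [{
            "cell_type": "code",
            "metadata": {},
            "source": "1 + 1",
            "outputs": [],
            "prompt_number": None,
        }],
    }

    # Raises jsonschema.ValidationError if the dict violates the schema;
    # internal "#/definitions/..." refs resolve within the same document.
    jsonschema.Draft4Validator(schema).validate(nb)
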
@@ -1,126 +1,130 @@
"""Tests for nbformat validation"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import io
import os

import nose.tools as nt

-from ..validator import
+from IPython.nbformat.validator import validate, ValidationError
from ..nbjson import reads
+from ..nbbase import nbformat
from ..compose import (
    new_code_cell, new_heading_cell, new_markdown_cell, new_notebook,
    new_output, new_raw_cell,
)

+def validate4(obj, ref=None):
+    return validate(obj, ref, version=nbformat)
+
def test_valid_code_cell():
    cell = new_code_cell()
-    validate(cell, 'code_cell')
+    validate4(cell, 'code_cell')

def test_invalid_code_cell():
    cell = new_code_cell()

    cell['source'] = 5
    with nt.assert_raises(ValidationError):
-        validate(cell, 'code_cell')
+        validate4(cell, 'code_cell')

    cell = new_code_cell()
    del cell['metadata']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'code_cell')
+        validate4(cell, 'code_cell')

    cell = new_code_cell()
    del cell['source']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'code_cell')
+        validate4(cell, 'code_cell')

    cell = new_code_cell()
    del cell['cell_type']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'code_cell')
+        validate4(cell, 'code_cell')

def test_invalid_markdown_cell():
    cell = new_markdown_cell()

    cell['source'] = 5
    with nt.assert_raises(ValidationError):
-        validate(cell, 'markdown_cell')
+        validate4(cell, 'markdown_cell')

    cell = new_markdown_cell()
    del cell['metadata']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'markdown_cell')
+        validate4(cell, 'markdown_cell')

    cell = new_markdown_cell()
    del cell['source']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'markdown_cell')
+        validate4(cell, 'markdown_cell')

    cell = new_markdown_cell()
    del cell['cell_type']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'markdown_cell')
+        validate4(cell, 'markdown_cell')

def test_invalid_heading_cell():
    cell = new_heading_cell()

    cell['source'] = 5
    with nt.assert_raises(ValidationError):
-        validate(cell, 'heading_cell')
+        validate4(cell, 'heading_cell')

    cell = new_heading_cell()
    del cell['metadata']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'heading_cell')
+        validate4(cell, 'heading_cell')

    cell = new_heading_cell()
    del cell['source']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'heading_cell')
+        validate4(cell, 'heading_cell')

    cell = new_heading_cell()
    del cell['cell_type']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'heading_cell')
+        validate4(cell, 'heading_cell')

def test_invalid_raw_cell():
    cell = new_raw_cell()

    cell['source'] = 5
    with nt.assert_raises(ValidationError):
-        validate(cell, 'raw_cell')
+        validate4(cell, 'raw_cell')

    cell = new_raw_cell()
    del cell['metadata']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'raw_cell')
+        validate4(cell, 'raw_cell')

    cell = new_raw_cell()
    del cell['source']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'raw_cell')
+        validate4(cell, 'raw_cell')

    cell = new_raw_cell()
    del cell['cell_type']

    with nt.assert_raises(ValidationError):
-        validate(cell, 'raw_cell')
+        validate4(cell, 'raw_cell')

def test_sample_notebook():
    here = os.path.dirname(__file__)
    with io.open(os.path.join(here, "v4-test.ipynb"), encoding='utf-8') as f:
        nb = reads(f.read())
-    validate(nb)
+    validate4(nb)
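
Note (not part of the diff): the new validate4 helper simply pins the schema version, so every test exercises the v4 schema even when a sub-schema reference such as 'code_cell' is passed. A sketch of the same call pattern outside the test suite follows; the module paths are inferred from the relative imports above and should be treated as assumptions, and the call signature mirrors the validate(obj, ref, version=nbformat) usage shown in the diff.

    # Sketch of the version-pinned validation call the tests rely on (paths assumed).
    from IPython.nbformat.validator import validate, ValidationError
    from IPython.nbformat.v4.nbbase import nbformat          # == 4
    from IPython.nbformat.v4.compose import new_code_cell    # same module the tests import

    cell = new_code_cell()
    cell["cell_type"] = "bogus"  # violates the {"enum": ["code"]} constraint
    try:
        validate(cell, "code_cell", version=nbformat)
    except ValidationError as err:
        print("rejected:", err)
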
@@ -1,736 +1,737 @@
# encoding: utf-8
"""
This module defines the things that are used in setup.py for building IPython

This includes:

    * The basic arguments to setup
    * Functions for finding things like packages, package data, etc.
    * A function for checking dependencies.
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from __future__ import print_function

import errno
import os
import sys

from distutils import log
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.install import install
from distutils.command.install_scripts import install_scripts
from distutils.cmd import Command
from fnmatch import fnmatch
from glob import glob
from subprocess import check_call

from setupext import install_data_ext

#-------------------------------------------------------------------------------
# Useful globals and utility functions
#-------------------------------------------------------------------------------

# A few handy globals
isfile = os.path.isfile
pjoin = os.path.join
repo_root = os.path.dirname(os.path.abspath(__file__))

def oscmd(s):
    print(">", s)
    os.system(s)

# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.

try:
    execfile
except NameError:
    def execfile(fname, globs, locs=None):
        locs = locs or globs
        exec(compile(open(fname).read(), fname, "exec"), globs, locs)

# A little utility we'll need below, since glob() does NOT allow you to do
# exclusion on multiple endings!
def file_doesnt_endwith(test,endings):
    """Return true if test is a file and its name does NOT end with any
    of the strings listed in endings."""
    if not isfile(test):
        return False
    for e in endings:
        if test.endswith(e):
            return False
    return True

#---------------------------------------------------------------------------
# Basic project information
#---------------------------------------------------------------------------

# release.py contains version, authors, license, url, keywords, etc.
execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())

# Create a dict with the basic information
# This dict is eventually passed to setup after additional keys are added.
setup_args = dict(
    name = name,
    version = version,
    description = description,
    long_description = long_description,
    author = author,
    author_email = author_email,
    url = url,
    download_url = download_url,
    license = license,
    platforms = platforms,
    keywords = keywords,
    classifiers = classifiers,
    cmdclass = {'install_data': install_data_ext},
)


#---------------------------------------------------------------------------
# Find packages
#---------------------------------------------------------------------------

def find_packages():
    """
    Find all of IPython's packages.
    """
    excludes = ['deathrow', 'quarantine']
    packages = []
    for dir,subdirs,files in os.walk('IPython'):
        package = dir.replace(os.path.sep, '.')
        if any(package.startswith('IPython.'+exc) for exc in excludes):
            # package is to be excluded (e.g. deathrow)
            continue
        if '__init__.py' not in files:
            # not a package
            continue
        packages.append(package)
    return packages

#---------------------------------------------------------------------------
# Find package data
#---------------------------------------------------------------------------

def find_package_data():
    """
    Find IPython's package_data.
    """
    # This is not enough for these things to appear in an sdist.
    # We need to muck with the MANIFEST to get this to work

    # exclude components and less from the walk;
    # we will build the components separately
    excludes = [
        pjoin('static', 'components'),
        pjoin('static', '*', 'less'),
    ]

    # walk notebook resources:
    cwd = os.getcwd()
    os.chdir(os.path.join('IPython', 'html'))
    static_data = []
    for parent, dirs, files in os.walk('static'):
        if any(fnmatch(parent, pat) for pat in excludes):
            # prevent descending into subdirs
            dirs[:] = []
            continue
        for f in files:
            static_data.append(pjoin(parent, f))

    components = pjoin("static", "components")
    # select the components we actually need to install
    # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
    static_data.extend([
        pjoin(components, "backbone", "backbone-min.js"),
        pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
        pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
        pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
        pjoin(components, "font-awesome", "fonts", "*.*"),
        pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
        pjoin(components, "highlight.js", "build", "highlight.pack.js"),
        pjoin(components, "jquery", "jquery.min.js"),
        pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
        pjoin(components, "marked", "lib", "marked.js"),
        pjoin(components, "requirejs", "require.js"),
        pjoin(components, "underscore", "underscore-min.js"),
        pjoin(components, "moment", "moment.js"),
        pjoin(components, "moment", "min", "moment.min.js"),
        pjoin(components, "term.js", "src", "term.js"),
        pjoin(components, "text-encoding", "lib", "encoding.js"),
    ])

    # Ship all of Codemirror's CSS and JS
    for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
        for f in files:
            if f.endswith(('.js', '.css')):
                static_data.append(pjoin(parent, f))

    os.chdir(os.path.join('tests',))
    js_tests = glob('*.js') + glob('*/*.js')

    os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
    nbconvert_templates = [os.path.join(dirpath, '*.*')
                           for dirpath, _, _ in os.walk('templates')]

    os.chdir(cwd)

    package_data = {
        'IPython.config.profile' : ['README*', '*/*.py'],
        'IPython.core.tests' : ['*.png', '*.jpg'],
        'IPython.lib.tests' : ['*.wav'],
        'IPython.testing.plugin' : ['*.txt'],
        'IPython.html' : ['templates/*'] + static_data,
        'IPython.html.tests' : js_tests,
        'IPython.qt.console' : ['resources/icon/*.svg'],
        'IPython.nbconvert' : nbconvert_templates +
            [
                'tests/files/*.*',
                'exporters/tests/files/*.*',
                'preprocessors/tests/files/*.*',
            ],
        'IPython.nbconvert.filters' : ['marked.js'],
        'IPython.nbformat' : [
            'tests/*.ipynb',
            'v3/nbformat.v3.schema.json',
+            'v4/nbformat.v4.schema.json',
        ]
    }

    return package_data


def check_package_data(package_data):
    """verify that package_data globs make sense"""
    print("checking package data")
    for pkg, data in package_data.items():
        pkg_root = pjoin(*pkg.split('.'))
        for d in data:
            path = pjoin(pkg_root, d)
            if '*' in path:
                assert len(glob(path)) > 0, "No files match pattern %s" % path
            else:
                assert os.path.exists(path), "Missing package data: %s" % path


def check_package_data_first(command):
    """decorator for checking package_data before running a given command

    Probably only needs to wrap build_py
    """
    class DecoratedCommand(command):
        def run(self):
            check_package_data(self.package_data)
            command.run(self)
    return DecoratedCommand


#---------------------------------------------------------------------------
# Find data files
#---------------------------------------------------------------------------

def make_dir_struct(tag,base,out_base):
    """Make the directory structure of all files below a starting dir.

    This is just a convenience routine to help build a nested directory
    hierarchy because distutils is too stupid to do this by itself.

    XXX - this needs a proper docstring!
    """

    # we'll use these a lot below
    lbase = len(base)
    pathsep = os.path.sep
    lpathsep = len(pathsep)

    out = []
    for (dirpath,dirnames,filenames) in os.walk(base):
        # we need to strip out the dirpath from the base to map it to the
        # output (installation) path. This requires possibly stripping the
        # path separator, because otherwise pjoin will not work correctly
        # (pjoin('foo/','/bar') returns '/bar').

        dp_eff = dirpath[lbase:]
        if dp_eff.startswith(pathsep):
            dp_eff = dp_eff[lpathsep:]
        # The output path must be anchored at the out_base marker
        out_path = pjoin(out_base,dp_eff)
        # Now we can generate the final filenames. Since os.walk only produces
        # filenames, we must join back with the dirpath to get full valid file
        # paths:
        pfiles = [pjoin(dirpath,f) for f in filenames]
        # Finally, generate the entry we need, which is a pari of (output
        # path, files) for use as a data_files parameter in install_data.
        out.append((out_path, pfiles))

    return out


def find_data_files():
    """
    Find IPython's data_files.

    Just man pages at this point.
    """

    manpagebase = pjoin('share', 'man', 'man1')

    # Simple file lists can be made by hand
    manpages = [f for f in glob(pjoin('docs','man','*.1.gz')) if isfile(f)]
    if not manpages:
        # When running from a source tree, the manpages aren't gzipped
        manpages = [f for f in glob(pjoin('docs','man','*.1')) if isfile(f)]

    # And assemble the entire output list
    data_files = [ (manpagebase, manpages) ]

    return data_files


def make_man_update_target(manpage):
    """Return a target_update-compliant tuple for the given manpage.

    Parameters
    ----------
    manpage : string
      Name of the manpage, must include the section number (trailing number).

    Example
    -------

    >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
    ('docs/man/ipython.1.gz',
     ['docs/man/ipython.1'],
     'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
    """
    man_dir = pjoin('docs', 'man')
    manpage_gz = manpage + '.gz'
    manpath = pjoin(man_dir, manpage)
    manpath_gz = pjoin(man_dir, manpage_gz)
    gz_cmd = ( "cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s" %
               locals() )
    return (manpath_gz, [manpath], gz_cmd)

# The two functions below are copied from IPython.utils.path, so we don't need
# to import IPython during setup, which fails on Python 3.

def target_outdated(target,deps):
    """Determine whether a target is out of date.

    target_outdated(target,deps) -> 1/0

    deps: list of filenames which MUST exist.
    target: single filename which may or may not exist.

    If target doesn't exist or is older than any file listed in deps, return
    true, otherwise return false.
    """
    try:
        target_time = os.path.getmtime(target)
    except os.error:
        return 1
    for dep in deps:
        dep_time = os.path.getmtime(dep)
        if dep_time > target_time:
            #print "For target",target,"Dep failed:",dep # dbg
341 | #print "times (dep,tar):",dep_time,target_time # dbg |
|
342 | #print "times (dep,tar):",dep_time,target_time # dbg | |
342 | return 1 |
|
343 | return 1 | |
343 | return 0 |
|
344 | return 0 | |
344 |
|
345 | |||
345 |
|
346 | |||
346 | def target_update(target,deps,cmd): |
|
347 | def target_update(target,deps,cmd): | |
347 | """Update a target with a given command given a list of dependencies. |
|
348 | """Update a target with a given command given a list of dependencies. | |
348 |
|
349 | |||
349 | target_update(target,deps,cmd) -> runs cmd if target is outdated. |
|
350 | target_update(target,deps,cmd) -> runs cmd if target is outdated. | |
350 |
|
351 | |||
351 | This is just a wrapper around target_outdated() which calls the given |
|
352 | This is just a wrapper around target_outdated() which calls the given | |
352 | command if target is outdated.""" |
|
353 | command if target is outdated.""" | |
353 |
|
354 | |||
354 | if target_outdated(target,deps): |
|
355 | if target_outdated(target,deps): | |
355 | os.system(cmd) |
|
356 | os.system(cmd) | |
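The two helpers above are designed to feed each other; the actual call site lives elsewhere in the build, so the loop below is only a sketch of the intended composition:

    # hypothetical driver: re-gzip any man page whose .gz is missing or stale
    for page in ('ipython.1',):
        target, deps, cmd = make_man_update_target(page)
        target_update(target, deps, cmd)   # runs the gzip command only when outdated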
356 |
|
357 | |||
357 | #--------------------------------------------------------------------------- |
|
358 | #--------------------------------------------------------------------------- | |
358 | # Find scripts |
|
359 | # Find scripts | |
359 | #--------------------------------------------------------------------------- |
|
360 | #--------------------------------------------------------------------------- | |
360 |
|
361 | |||
361 | def find_entry_points(): |
|
362 | def find_entry_points(): | |
362 | """Find IPython's scripts. |
|
363 | """Find IPython's scripts. | |
363 |
|
364 | |||
364 | Returns setuptools entry_point-style definitions for IPython's console |
|
365 | Returns setuptools entry_point-style definitions for IPython's console | |
365 | scripts: ipython, ipcontroller, ipengine, ipcluster and iptest. |
|
366 | scripts: ipython, ipcontroller, ipengine, ipcluster and iptest. | |
366 |
|
367 | |||
367 | Each script is listed twice: once under its plain name, and once with |
|
368 | Each script is listed twice: once under its plain name, and once with | |
368 | the interpreter's major version appended as a suffix, so that the |
|
369 | the interpreter's major version appended as a suffix, so that the | |
369 | Python 3 scripts get named "ipython3" etc. |
|
370 | Python 3 scripts get named "ipython3" etc. | |
370 |
|
371 | |||
371 | """ |
|
372 | """ | |
372 | ep = [ |
|
373 | ep = [ | |
373 | 'ipython%s = IPython:start_ipython', |
|
374 | 'ipython%s = IPython:start_ipython', | |
374 | 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance', |
|
375 | 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance', | |
375 | 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance', |
|
376 | 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance', | |
376 | 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance', |
|
377 | 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance', | |
377 | 'iptest%s = IPython.testing.iptestcontroller:main', |
|
378 | 'iptest%s = IPython.testing.iptestcontroller:main', | |
378 | ] |
|
379 | ] | |
379 | suffix = str(sys.version_info[0]) |
|
380 | suffix = str(sys.version_info[0]) | |
380 | return [e % '' for e in ep] + [e % suffix for e in ep] |
|
381 | return [e % '' for e in ep] + [e % suffix for e in ep] | |
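Because the list is emitted twice — once with an empty suffix and once with the interpreter's major version — every console script is registered under two names. On a Python 3 interpreter the first and last entries would look roughly like this (illustrative):

    >>> eps = find_entry_points()
    >>> eps[0]
    'ipython = IPython:start_ipython'
    >>> eps[-1]
    'iptest3 = IPython.testing.iptestcontroller:main'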
381 |
|
382 | |||
382 | script_src = """#!{executable} |
|
383 | script_src = """#!{executable} | |
383 | # This script was automatically generated by setup.py |
|
384 | # This script was automatically generated by setup.py | |
384 | if __name__ == '__main__': |
|
385 | if __name__ == '__main__': | |
385 | from {mod} import {func} |
|
386 | from {mod} import {func} | |
386 | {func}() |
|
387 | {func}() | |
387 | """ |
|
388 | """ | |
388 |
|
389 | |||
389 | class build_scripts_entrypt(build_scripts): |
|
390 | class build_scripts_entrypt(build_scripts): | |
390 | def run(self): |
|
391 | def run(self): | |
391 | self.mkpath(self.build_dir) |
|
392 | self.mkpath(self.build_dir) | |
392 | outfiles = [] |
|
393 | outfiles = [] | |
393 | for script in find_entry_points(): |
|
394 | for script in find_entry_points(): | |
394 | name, entrypt = script.split('=') |
|
395 | name, entrypt = script.split('=') | |
395 | name = name.strip() |
|
396 | name = name.strip() | |
396 | entrypt = entrypt.strip() |
|
397 | entrypt = entrypt.strip() | |
397 | outfile = os.path.join(self.build_dir, name) |
|
398 | outfile = os.path.join(self.build_dir, name) | |
398 | outfiles.append(outfile) |
|
399 | outfiles.append(outfile) | |
399 | print('Writing script to', outfile) |
|
400 | print('Writing script to', outfile) | |
400 |
|
401 | |||
401 | mod, func = entrypt.split(':') |
|
402 | mod, func = entrypt.split(':') | |
402 | with open(outfile, 'w') as f: |
|
403 | with open(outfile, 'w') as f: | |
403 | f.write(script_src.format(executable=sys.executable, |
|
404 | f.write(script_src.format(executable=sys.executable, | |
404 | mod=mod, func=func)) |
|
405 | mod=mod, func=func)) | |
405 |
|
406 | |||
406 | return outfiles, outfiles |
|
407 | return outfiles, outfiles | |
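Substituting the first entry point into script_src shows what build_scripts_entrypt actually writes; the interpreter path is whatever sys.executable is on the build machine (shown here as a placeholder):

    #!/usr/bin/python3
    # This script was automatically generated by setup.py
    if __name__ == '__main__':
        from IPython import start_ipython
        start_ipython()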
407 |
|
408 | |||
408 | class install_lib_symlink(Command): |
|
409 | class install_lib_symlink(Command): | |
409 | user_options = [ |
|
410 | user_options = [ | |
410 | ('install-dir=', 'd', "directory to install to"), |
|
411 | ('install-dir=', 'd', "directory to install to"), | |
411 | ] |
|
412 | ] | |
412 |
|
413 | |||
413 | def initialize_options(self): |
|
414 | def initialize_options(self): | |
414 | self.install_dir = None |
|
415 | self.install_dir = None | |
415 |
|
416 | |||
416 | def finalize_options(self): |
|
417 | def finalize_options(self): | |
417 | self.set_undefined_options('symlink', |
|
418 | self.set_undefined_options('symlink', | |
418 | ('install_lib', 'install_dir'), |
|
419 | ('install_lib', 'install_dir'), | |
419 | ) |
|
420 | ) | |
420 |
|
421 | |||
421 | def run(self): |
|
422 | def run(self): | |
422 | if sys.platform == 'win32': |
|
423 | if sys.platform == 'win32': | |
423 | raise Exception("This doesn't work on Windows.") |
|
424 | raise Exception("This doesn't work on Windows.") | |
424 | pkg = os.path.join(os.getcwd(), 'IPython') |
|
425 | pkg = os.path.join(os.getcwd(), 'IPython') | |
425 | dest = os.path.join(self.install_dir, 'IPython') |
|
426 | dest = os.path.join(self.install_dir, 'IPython') | |
426 | if os.path.islink(dest): |
|
427 | if os.path.islink(dest): | |
427 | print('removing existing symlink at %s' % dest) |
|
428 | print('removing existing symlink at %s' % dest) | |
428 | os.unlink(dest) |
|
429 | os.unlink(dest) | |
429 | print('symlinking %s -> %s' % (pkg, dest)) |
|
430 | print('symlinking %s -> %s' % (pkg, dest)) | |
430 | os.symlink(pkg, dest) |
|
431 | os.symlink(pkg, dest) | |
431 |
|
432 | |||
432 | class unsymlink(install): |
|
433 | class unsymlink(install): | |
433 | def run(self): |
|
434 | def run(self): | |
434 | dest = os.path.join(self.install_lib, 'IPython') |
|
435 | dest = os.path.join(self.install_lib, 'IPython') | |
435 | if os.path.islink(dest): |
|
436 | if os.path.islink(dest): | |
436 | print('removing symlink at %s' % dest) |
|
437 | print('removing symlink at %s' % dest) | |
437 | os.unlink(dest) |
|
438 | os.unlink(dest) | |
438 | else: |
|
439 | else: | |
439 | print('No symlink exists at %s' % dest) |
|
440 | print('No symlink exists at %s' % dest) | |
440 |
|
441 | |||
441 | class install_symlinked(install): |
|
442 | class install_symlinked(install): | |
442 | def run(self): |
|
443 | def run(self): | |
443 | if sys.platform == 'win32': |
|
444 | if sys.platform == 'win32': | |
444 | raise Exception("This doesn't work on Windows.") |
|
445 | raise Exception("This doesn't work on Windows.") | |
445 |
|
446 | |||
446 | # Run all sub-commands (at least those that need to be run) |
|
447 | # Run all sub-commands (at least those that need to be run) | |
447 | for cmd_name in self.get_sub_commands(): |
|
448 | for cmd_name in self.get_sub_commands(): | |
448 | self.run_command(cmd_name) |
|
449 | self.run_command(cmd_name) | |
449 |
|
450 | |||
450 | # 'sub_commands': a list of commands this command might have to run to |
|
451 | # 'sub_commands': a list of commands this command might have to run to | |
451 | # get its work done. See cmd.py for more info. |
|
452 | # get its work done. See cmd.py for more info. | |
452 | sub_commands = [('install_lib_symlink', lambda self:True), |
|
453 | sub_commands = [('install_lib_symlink', lambda self:True), | |
453 | ('install_scripts_sym', lambda self:True), |
|
454 | ('install_scripts_sym', lambda self:True), | |
454 | ] |
|
455 | ] | |
455 |
|
456 | |||
456 | class install_scripts_for_symlink(install_scripts): |
|
457 | class install_scripts_for_symlink(install_scripts): | |
457 | """Redefined to get options from 'symlink' instead of 'install'. |
|
458 | """Redefined to get options from 'symlink' instead of 'install'. | |
458 |
|
459 | |||
459 | I love distutils almost as much as I love setuptools. |
|
460 | I love distutils almost as much as I love setuptools. | |
460 | """ |
|
461 | """ | |
461 | def finalize_options(self): |
|
462 | def finalize_options(self): | |
462 | self.set_undefined_options('build', ('build_scripts', 'build_dir')) |
|
463 | self.set_undefined_options('build', ('build_scripts', 'build_dir')) | |
463 | self.set_undefined_options('symlink', |
|
464 | self.set_undefined_options('symlink', | |
464 | ('install_scripts', 'install_dir'), |
|
465 | ('install_scripts', 'install_dir'), | |
465 | ('force', 'force'), |
|
466 | ('force', 'force'), | |
466 | ('skip_build', 'skip_build'), |
|
467 | ('skip_build', 'skip_build'), | |
467 | ) |
|
468 | ) | |
468 |
|
469 | |||
469 | #--------------------------------------------------------------------------- |
|
470 | #--------------------------------------------------------------------------- | |
470 | # Verify all dependencies |
|
471 | # Verify all dependencies | |
471 | #--------------------------------------------------------------------------- |
|
472 | #--------------------------------------------------------------------------- | |
472 |
|
473 | |||
473 | def check_for_dependencies(): |
|
474 | def check_for_dependencies(): | |
474 | """Check for IPython's dependencies. |
|
475 | """Check for IPython's dependencies. | |
475 |
|
476 | |||
476 | This function should NOT be called if running under setuptools! |
|
477 | This function should NOT be called if running under setuptools! | |
477 | """ |
|
478 | """ | |
478 | from setupext.setupext import ( |
|
479 | from setupext.setupext import ( | |
479 | print_line, print_raw, print_status, |
|
480 | print_line, print_raw, print_status, | |
480 | check_for_sphinx, check_for_pygments, |
|
481 | check_for_sphinx, check_for_pygments, | |
481 | check_for_nose, check_for_pexpect, |
|
482 | check_for_nose, check_for_pexpect, | |
482 | check_for_pyzmq, check_for_readline, |
|
483 | check_for_pyzmq, check_for_readline, | |
483 | check_for_jinja2, check_for_tornado |
|
484 | check_for_jinja2, check_for_tornado | |
484 | ) |
|
485 | ) | |
485 | print_line() |
|
486 | print_line() | |
486 | print_raw("BUILDING IPYTHON") |
|
487 | print_raw("BUILDING IPYTHON") | |
487 | print_status('python', sys.version) |
|
488 | print_status('python', sys.version) | |
488 | print_status('platform', sys.platform) |
|
489 | print_status('platform', sys.platform) | |
489 | if sys.platform == 'win32': |
|
490 | if sys.platform == 'win32': | |
490 | print_status('Windows version', sys.getwindowsversion()) |
|
491 | print_status('Windows version', sys.getwindowsversion()) | |
491 |
|
492 | |||
492 | print_raw("") |
|
493 | print_raw("") | |
493 | print_raw("OPTIONAL DEPENDENCIES") |
|
494 | print_raw("OPTIONAL DEPENDENCIES") | |
494 |
|
495 | |||
495 | check_for_sphinx() |
|
496 | check_for_sphinx() | |
496 | check_for_pygments() |
|
497 | check_for_pygments() | |
497 | check_for_nose() |
|
498 | check_for_nose() | |
498 | if os.name == 'posix': |
|
499 | if os.name == 'posix': | |
499 | check_for_pexpect() |
|
500 | check_for_pexpect() | |
500 | check_for_pyzmq() |
|
501 | check_for_pyzmq() | |
501 | check_for_tornado() |
|
502 | check_for_tornado() | |
502 | check_for_readline() |
|
503 | check_for_readline() | |
503 | check_for_jinja2() |
|
504 | check_for_jinja2() | |
504 |
|
505 | |||
505 | #--------------------------------------------------------------------------- |
|
506 | #--------------------------------------------------------------------------- | |
506 | # VCS related |
|
507 | # VCS related | |
507 | #--------------------------------------------------------------------------- |
|
508 | #--------------------------------------------------------------------------- | |
508 |
|
509 | |||
509 | # utils.submodule has checks for submodule status |
|
510 | # utils.submodule has checks for submodule status | |
510 | execfile(pjoin('IPython','utils','submodule.py'), globals()) |
|
511 | execfile(pjoin('IPython','utils','submodule.py'), globals()) | |
511 |
|
512 | |||
512 | class UpdateSubmodules(Command): |
|
513 | class UpdateSubmodules(Command): | |
513 | """Update git submodules |
|
514 | """Update git submodules | |
514 |
|
515 | |||
515 | IPython's external javascript dependencies live in a separate repo. |
|
516 | IPython's external javascript dependencies live in a separate repo. | |
516 | """ |
|
517 | """ | |
517 | description = "Update git submodules" |
|
518 | description = "Update git submodules" | |
518 | user_options = [] |
|
519 | user_options = [] | |
519 |
|
520 | |||
520 | def initialize_options(self): |
|
521 | def initialize_options(self): | |
521 | pass |
|
522 | pass | |
522 |
|
523 | |||
523 | def finalize_options(self): |
|
524 | def finalize_options(self): | |
524 | pass |
|
525 | pass | |
525 |
|
526 | |||
526 | def run(self): |
|
527 | def run(self): | |
527 | failure = False |
|
528 | failure = False | |
528 | try: |
|
529 | try: | |
529 | self.spawn('git submodule init'.split()) |
|
530 | self.spawn('git submodule init'.split()) | |
530 | self.spawn('git submodule update --recursive'.split()) |
|
531 | self.spawn('git submodule update --recursive'.split()) | |
531 | except Exception as e: |
|
532 | except Exception as e: | |
532 | failure = e |
|
533 | failure = e | |
533 | print(e) |
|
534 | print(e) | |
534 |
|
535 | |||
535 | if not check_submodule_status(repo_root) == 'clean': |
|
536 | if not check_submodule_status(repo_root) == 'clean': | |
536 | print("submodules could not be checked out") |
|
537 | print("submodules could not be checked out") | |
537 | sys.exit(1) |
|
538 | sys.exit(1) | |
538 |
|
539 | |||
539 |
|
540 | |||
540 | def git_prebuild(pkg_dir, build_cmd=build_py): |
|
541 | def git_prebuild(pkg_dir, build_cmd=build_py): | |
541 | """Return extended build or sdist command class for recording commit |
|
542 | """Return extended build or sdist command class for recording commit | |
542 |
|
543 | |||
543 | records git commit in IPython.utils._sysinfo.commit |
|
544 | records git commit in IPython.utils._sysinfo.commit | |
544 |
|
545 | |||
545 | for use in IPython.utils.sysinfo.sys_info() calls after installation. |
|
546 | for use in IPython.utils.sysinfo.sys_info() calls after installation. | |
546 |
|
547 | |||
547 | Also ensures that submodules exist prior to running |
|
548 | Also ensures that submodules exist prior to running | |
548 | """ |
|
549 | """ | |
549 |
|
550 | |||
550 | class MyBuildPy(build_cmd): |
|
551 | class MyBuildPy(build_cmd): | |
551 | ''' Subclass to write commit data into installation tree ''' |
|
552 | ''' Subclass to write commit data into installation tree ''' | |
552 | def run(self): |
|
553 | def run(self): | |
553 | build_cmd.run(self) |
|
554 | build_cmd.run(self) | |
554 | # this one will only fire for build commands |
|
555 | # this one will only fire for build commands | |
555 | if hasattr(self, 'build_lib'): |
|
556 | if hasattr(self, 'build_lib'): | |
556 | self._record_commit(self.build_lib) |
|
557 | self._record_commit(self.build_lib) | |
557 |
|
558 | |||
558 | def make_release_tree(self, base_dir, files): |
|
559 | def make_release_tree(self, base_dir, files): | |
559 | # this one will fire for sdist |
|
560 | # this one will fire for sdist | |
560 | build_cmd.make_release_tree(self, base_dir, files) |
|
561 | build_cmd.make_release_tree(self, base_dir, files) | |
561 | self._record_commit(base_dir) |
|
562 | self._record_commit(base_dir) | |
562 |
|
563 | |||
563 | def _record_commit(self, base_dir): |
|
564 | def _record_commit(self, base_dir): | |
564 | import subprocess |
|
565 | import subprocess | |
565 | proc = subprocess.Popen('git rev-parse --short HEAD', |
|
566 | proc = subprocess.Popen('git rev-parse --short HEAD', | |
566 | stdout=subprocess.PIPE, |
|
567 | stdout=subprocess.PIPE, | |
567 | stderr=subprocess.PIPE, |
|
568 | stderr=subprocess.PIPE, | |
568 | shell=True) |
|
569 | shell=True) | |
569 | repo_commit, _ = proc.communicate() |
|
570 | repo_commit, _ = proc.communicate() | |
570 | repo_commit = repo_commit.strip().decode("ascii") |
|
571 | repo_commit = repo_commit.strip().decode("ascii") | |
571 |
|
572 | |||
572 | out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py') |
|
573 | out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py') | |
573 | if os.path.isfile(out_pth) and not repo_commit: |
|
574 | if os.path.isfile(out_pth) and not repo_commit: | |
574 | # nothing to write, don't clobber |
|
575 | # nothing to write, don't clobber | |
575 | return |
|
576 | return | |
576 |
|
577 | |||
577 | print("writing git commit '%s' to %s" % (repo_commit, out_pth)) |
|
578 | print("writing git commit '%s' to %s" % (repo_commit, out_pth)) | |
578 |
|
579 | |||
579 | # remove to avoid overwriting original via hard link |
|
580 | # remove to avoid overwriting original via hard link | |
580 | try: |
|
581 | try: | |
581 | os.remove(out_pth) |
|
582 | os.remove(out_pth) | |
582 | except (IOError, OSError): |
|
583 | except (IOError, OSError): | |
583 | pass |
|
584 | pass | |
584 | with open(out_pth, 'w') as out_file: |
|
585 | with open(out_pth, 'w') as out_file: | |
585 | out_file.writelines([ |
|
586 | out_file.writelines([ | |
586 | '# GENERATED BY setup.py\n', |
|
587 | '# GENERATED BY setup.py\n', | |
587 | 'commit = u"%s"\n' % repo_commit, |
|
588 | 'commit = u"%s"\n' % repo_commit, | |
588 | ]) |
|
589 | ]) | |
589 | return require_submodules(MyBuildPy) |
|
590 | return require_submodules(MyBuildPy) | |
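The _sysinfo.py module that _record_commit writes is deliberately tiny; for a checkout at commit 1a2b3c4 (placeholder hash) it would contain exactly:

    # GENERATED BY setup.py
    commit = u"1a2b3c4"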
590 |
|
591 | |||
591 |
|
592 | |||
592 | def require_submodules(command): |
|
593 | def require_submodules(command): | |
593 | """decorator for instructing a command to check for submodules before running""" |
|
594 | """decorator for instructing a command to check for submodules before running""" | |
594 | class DecoratedCommand(command): |
|
595 | class DecoratedCommand(command): | |
595 | def run(self): |
|
596 | def run(self): | |
596 | if not check_submodule_status(repo_root) == 'clean': |
|
597 | if not check_submodule_status(repo_root) == 'clean': | |
597 | print("submodules missing! Run `setup.py submodule` and try again") |
|
598 | print("submodules missing! Run `setup.py submodule` and try again") | |
598 | sys.exit(1) |
|
599 | sys.exit(1) | |
599 | command.run(self) |
|
600 | command.run(self) | |
600 | return DecoratedCommand |
|
601 | return DecoratedCommand | |
601 |
|
602 | |||
602 | #--------------------------------------------------------------------------- |
|
603 | #--------------------------------------------------------------------------- | |
603 | # bdist related |
|
604 | # bdist related | |
604 | #--------------------------------------------------------------------------- |
|
605 | #--------------------------------------------------------------------------- | |
605 |
|
606 | |||
606 | def get_bdist_wheel(): |
|
607 | def get_bdist_wheel(): | |
607 | """Construct bdist_wheel command for building wheels |
|
608 | """Construct bdist_wheel command for building wheels | |
608 |
|
609 | |||
609 | Constructs py2-none-any tag, instead of py2.7-none-any |
|
610 | Constructs py2-none-any tag, instead of py2.7-none-any | |
610 | """ |
|
611 | """ | |
611 | class RequiresWheel(Command): |
|
612 | class RequiresWheel(Command): | |
612 | description = "Dummy command for missing bdist_wheel" |
|
613 | description = "Dummy command for missing bdist_wheel" | |
613 | user_options = [] |
|
614 | user_options = [] | |
614 |
|
615 | |||
615 | def initialize_options(self): |
|
616 | def initialize_options(self): | |
616 | pass |
|
617 | pass | |
617 |
|
618 | |||
618 | def finalize_options(self): |
|
619 | def finalize_options(self): | |
619 | pass |
|
620 | pass | |
620 |
|
621 | |||
621 | def run(self): |
|
622 | def run(self): | |
622 | print("bdist_wheel requires the wheel package") |
|
623 | print("bdist_wheel requires the wheel package") | |
623 | sys.exit(1) |
|
624 | sys.exit(1) | |
624 |
|
625 | |||
625 | if 'setuptools' not in sys.modules: |
|
626 | if 'setuptools' not in sys.modules: | |
626 | return RequiresWheel |
|
627 | return RequiresWheel | |
627 | else: |
|
628 | else: | |
628 | try: |
|
629 | try: | |
629 | from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info |
|
630 | from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info | |
630 | except ImportError: |
|
631 | except ImportError: | |
631 | return RequiresWheel |
|
632 | return RequiresWheel | |
632 |
|
633 | |||
633 | class bdist_wheel_tag(bdist_wheel): |
|
634 | class bdist_wheel_tag(bdist_wheel): | |
634 |
|
635 | |||
635 | def add_requirements(self, metadata_path): |
|
636 | def add_requirements(self, metadata_path): | |
636 | """transform platform-dependent requirements""" |
|
637 | """transform platform-dependent requirements""" | |
637 | pkg_info = read_pkg_info(metadata_path) |
|
638 | pkg_info = read_pkg_info(metadata_path) | |
638 | # pkg_info is an email.Message object (?!) |
|
639 | # pkg_info is an email.Message object (?!) | |
639 | # we have to remove the unconditional 'readline' and/or 'pyreadline' entries |
|
640 | # we have to remove the unconditional 'readline' and/or 'pyreadline' entries | |
640 | # and transform them to conditionals |
|
641 | # and transform them to conditionals | |
641 | requires = pkg_info.get_all('Requires-Dist') |
|
642 | requires = pkg_info.get_all('Requires-Dist') | |
642 | del pkg_info['Requires-Dist'] |
|
643 | del pkg_info['Requires-Dist'] | |
643 | def _remove_startswith(lis, prefix): |
|
644 | def _remove_startswith(lis, prefix): | |
644 | """like list.remove, but with startswith instead of ==""" |
|
645 | """like list.remove, but with startswith instead of ==""" | |
645 | found = False |
|
646 | found = False | |
646 | for idx, item in enumerate(lis): |
|
647 | for idx, item in enumerate(lis): | |
647 | if item.startswith(prefix): |
|
648 | if item.startswith(prefix): | |
648 | found = True |
|
649 | found = True | |
649 | break |
|
650 | break | |
650 | if found: |
|
651 | if found: | |
651 | lis.pop(idx) |
|
652 | lis.pop(idx) | |
652 |
|
653 | |||
653 | for pkg in ("gnureadline", "pyreadline", "mock"): |
|
654 | for pkg in ("gnureadline", "pyreadline", "mock"): | |
654 | _remove_startswith(requires, pkg) |
|
655 | _remove_startswith(requires, pkg) | |
655 | requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'") |
|
656 | requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'") | |
656 | requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'") |
|
657 | requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'") | |
657 | requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'") |
|
658 | requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'") | |
658 | requires.append("mock; extra == 'test' and python_version < '3.3'") |
|
659 | requires.append("mock; extra == 'test' and python_version < '3.3'") | |
659 | for r in requires: |
|
660 | for r in requires: | |
660 | pkg_info['Requires-Dist'] = r |
|
661 | pkg_info['Requires-Dist'] = r | |
661 | write_pkg_info(metadata_path, pkg_info) |
|
662 | write_pkg_info(metadata_path, pkg_info) | |
662 |
|
663 | |||
663 | return bdist_wheel_tag |
|
664 | return bdist_wheel_tag | |
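The effect of add_requirements is easiest to see on the wheel metadata itself: an unconditional readline requirement baked in on the build machine is replaced by environment-marker conditionals, so one wheel can serve every platform. Roughly (the "before" line depends on where the wheel is built):

    Before:  Requires-Dist: gnureadline
    After:   Requires-Dist: gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'
             Requires-Dist: pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'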
664 |
|
665 | |||
665 | #--------------------------------------------------------------------------- |
|
666 | #--------------------------------------------------------------------------- | |
666 | # Notebook related |
|
667 | # Notebook related | |
667 | #--------------------------------------------------------------------------- |
|
668 | #--------------------------------------------------------------------------- | |
668 |
|
669 | |||
669 | class CompileCSS(Command): |
|
670 | class CompileCSS(Command): | |
670 | """Recompile Notebook CSS |
|
671 | """Recompile Notebook CSS | |
671 |
|
672 | |||
672 | Regenerate the compiled CSS from LESS sources. |
|
673 | Regenerate the compiled CSS from LESS sources. | |
673 |
|
674 | |||
674 | Requires various dev dependencies, such as invoke and lessc. |
|
675 | Requires various dev dependencies, such as invoke and lessc. | |
675 | """ |
|
676 | """ | |
676 | description = "Recompile Notebook CSS" |
|
677 | description = "Recompile Notebook CSS" | |
677 | user_options = [ |
|
678 | user_options = [ | |
678 | ('minify', 'x', "minify CSS"), |
|
679 | ('minify', 'x', "minify CSS"), | |
679 | ('force', 'f', "force recompilation of CSS"), |
|
680 | ('force', 'f', "force recompilation of CSS"), | |
680 | ] |
|
681 | ] | |
681 |
|
682 | |||
682 | def initialize_options(self): |
|
683 | def initialize_options(self): | |
683 | self.minify = False |
|
684 | self.minify = False | |
684 | self.force = False |
|
685 | self.force = False | |
685 |
|
686 | |||
686 | def finalize_options(self): |
|
687 | def finalize_options(self): | |
687 | self.minify = bool(self.minify) |
|
688 | self.minify = bool(self.minify) | |
688 | self.force = bool(self.force) |
|
689 | self.force = bool(self.force) | |
689 |
|
690 | |||
690 | def run(self): |
|
691 | def run(self): | |
691 | cmd = ['invoke', 'css'] |
|
692 | cmd = ['invoke', 'css'] | |
692 | if self.minify: |
|
693 | if self.minify: | |
693 | cmd.append('--minify') |
|
694 | cmd.append('--minify') | |
694 | if self.force: |
|
695 | if self.force: | |
695 | cmd.append('--force') |
|
696 | cmd.append('--force') | |
696 | check_call(cmd, cwd=pjoin(repo_root, "IPython", "html")) |
|
697 | check_call(cmd, cwd=pjoin(repo_root, "IPython", "html")) | |
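In other words, `python setup.py css --minify --force` ends up running invoke inside IPython/html; a direct equivalent, assuming invoke and lessc are installed and the working directory is the repo root, would be:

    from subprocess import check_call
    check_call(['invoke', 'css', '--minify', '--force'], cwd='IPython/html')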
697 |
|
698 | |||
698 |
|
699 | |||
699 | class JavascriptVersion(Command): |
|
700 | class JavascriptVersion(Command): | |
700 | """write the javascript version to notebook javascript""" |
|
701 | """write the javascript version to notebook javascript""" | |
701 | description = "Write IPython version to javascript" |
|
702 | description = "Write IPython version to javascript" | |
702 | user_options = [] |
|
703 | user_options = [] | |
703 |
|
704 | |||
704 | def initialize_options(self): |
|
705 | def initialize_options(self): | |
705 | pass |
|
706 | pass | |
706 |
|
707 | |||
707 | def finalize_options(self): |
|
708 | def finalize_options(self): | |
708 | pass |
|
709 | pass | |
709 |
|
710 | |||
710 | def run(self): |
|
711 | def run(self): | |
711 | nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js") |
|
712 | nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js") | |
712 | with open(nsfile) as f: |
|
713 | with open(nsfile) as f: | |
713 | lines = f.readlines() |
|
714 | lines = f.readlines() | |
714 | with open(nsfile, 'w') as f: |
|
715 | with open(nsfile, 'w') as f: | |
715 | for line in lines: |
|
716 | for line in lines: | |
716 | if line.startswith("IPython.version"): |
|
717 | if line.startswith("IPython.version"): | |
717 | line = 'IPython.version = "{0}";\n'.format(version) |
|
718 | line = 'IPython.version = "{0}";\n'.format(version) | |
718 | f.write(line) |
|
719 | f.write(line) | |
719 |
|
720 | |||
720 |
|
721 | |||
721 | def css_js_prerelease(command, strict=True): |
|
722 | def css_js_prerelease(command, strict=True): | |
722 | """decorator for building js/minified css prior to a release""" |
|
723 | """decorator for building js/minified css prior to a release""" | |
723 | class DecoratedCommand(command): |
|
724 | class DecoratedCommand(command): | |
724 | def run(self): |
|
725 | def run(self): | |
725 | self.distribution.run_command('jsversion') |
|
726 | self.distribution.run_command('jsversion') | |
726 | css = self.distribution.get_command_obj('css') |
|
727 | css = self.distribution.get_command_obj('css') | |
727 | css.minify = True |
|
728 | css.minify = True | |
728 | try: |
|
729 | try: | |
729 | self.distribution.run_command('css') |
|
730 | self.distribution.run_command('css') | |
730 | except Exception as e: |
|
731 | except Exception as e: | |
731 | if strict: |
|
732 | if strict: | |
732 | raise |
|
733 | raise | |
733 | else: |
|
734 | else: | |
734 | log.warn("Failed to build css sourcemaps: %s" % e) |
|
735 | log.warn("Failed to build css sourcemaps: %s" % e) | |
735 | command.run(self) |
|
736 | command.run(self) | |
736 | return DecoratedCommand |
|
737 | return DecoratedCommand |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |