Show More
@@ -1,38 +1,38 b'' | |||
|
1 | 1 | """The basic dict based notebook format. |
|
2 | 2 | |
|
3 | 3 | The Python representation of a notebook is a nested structure of |
|
4 | 4 | dictionary subclasses that support attribute access |
|
5 | 5 | (IPython.utils.ipstruct.Struct). The functions in this module are merely |
|
6 | 6 | helpers to build the structs in the right form. |
|
7 | 7 | """ |
|
8 | 8 | |
|
9 | 9 | # Copyright (c) IPython Development Team. |
|
10 | 10 | # Distributed under the terms of the Modified BSD License. |
|
11 | 11 | |
|
12 | 12 | import pprint |
|
13 | 13 | import uuid |
|
14 | 14 | |
|
15 | 15 | from IPython.utils.ipstruct import Struct |
|
16 | 16 | from IPython.utils.py3compat import cast_unicode, unicode_type |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | # Change this when incrementing the nbformat version |
|
20 | 20 | nbformat = 4 |
|
21 | 21 | nbformat_minor = 0 |
|
22 |
nbformat_schema = ' |
|
|
22 | nbformat_schema = 'nbformat.v4.schema.json' | |
|
23 | 23 | |
|
24 | 24 | class NotebookNode(Struct): |
|
25 | 25 | pass |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | def from_dict(d): |
|
29 | 29 | if isinstance(d, dict): |
|
30 | 30 | newd = NotebookNode() |
|
31 | 31 | for k,v in d.items(): |
|
32 | 32 | newd[k] = from_dict(v) |
|
33 | 33 | return newd |
|
34 | 34 | elif isinstance(d, (tuple, list)): |
|
35 | 35 | return [from_dict(i) for i in d] |
|
36 | 36 | else: |
|
37 | 37 | return d |
|
38 | 38 |
@@ -1,347 +1,346 b'' | |||
|
1 | 1 | { |
|
2 | 2 | "$schema": "http://json-schema.org/draft-04/schema#", |
|
3 | 3 | "description": "IPython Notebook v4.0 JSON schema.", |
|
4 | 4 | "type": "object", |
|
5 | 5 | "additionalProperties": false, |
|
6 | 6 | "required": ["metadata", "nbformat_minor", "nbformat", "cells"], |
|
7 | 7 | "properties": { |
|
8 | 8 | "metadata": { |
|
9 | 9 | "description": "Notebook root-level metadata.", |
|
10 | 10 | "type": "object", |
|
11 | 11 | "additionalProperties": true, |
|
12 | 12 | "properties": { |
|
13 | 13 | "kernel_info": { |
|
14 | 14 | "description": "Kernel information.", |
|
15 | 15 | "type": "object", |
|
16 | 16 | "required": ["name", "language"], |
|
17 | 17 | "properties": { |
|
18 | 18 | "name": { |
|
19 | 19 | "description": "Name of the kernel specification.", |
|
20 | 20 | "type": "string" |
|
21 | 21 | }, |
|
22 | 22 | "language": { |
|
23 | 23 | "description": "The programming language which this kernel runs.", |
|
24 | 24 | "type": "string" |
|
25 | 25 | }, |
|
26 | 26 | "codemirror_mode": { |
|
27 | 27 | "description": "The codemirror mode to use for code in this language.", |
|
28 | 28 | "type": "string" |
|
29 | 29 | } |
|
30 | 30 | } |
|
31 | 31 | }, |
|
32 | 32 | "signature": { |
|
33 | 33 | "description": "Hash of the notebook.", |
|
34 | 34 | "type": "string" |
|
35 | 35 | }, |
|
36 | 36 | "orig_nbformat": { |
|
37 | 37 | "description": "Original notebook format (major number) before converting the notebook between versions.", |
|
38 | 38 | "type": "integer", |
|
39 | 39 | "minimum": 1 |
|
40 | 40 | } |
|
41 | 41 | } |
|
42 | 42 | }, |
|
43 | 43 | "nbformat_minor": { |
|
44 | 44 | "description": "Notebook format (minor number). Incremented for backward compatible changes to the notebook format.", |
|
45 | 45 | "type": "integer", |
|
46 | 46 | "minimum": 0 |
|
47 | 47 | }, |
|
48 | 48 | "nbformat": { |
|
49 | 49 | "description": "Notebook format (major number). Incremented between backwards incompatible changes to the notebook format.", |
|
50 | 50 | "type": "integer", |
|
51 | 51 | "minimum": 4, |
|
52 | 52 | "maximum": 4 |
|
53 | 53 | }, |
|
54 | 54 | "cells": { |
|
55 | 55 | "description": "Array of cells of the current notebook.", |
|
56 | 56 | "type": "array", |
|
57 | 57 | "items": { |
|
58 | 58 | "type": "object", |
|
59 | 59 | "oneOf": [ |
|
60 | 60 | {"$ref": "#/definitions/raw_cell"}, |
|
61 | 61 | {"$ref": "#/definitions/markdown_cell"}, |
|
62 | 62 | {"$ref": "#/definitions/heading_cell"}, |
|
63 | 63 | {"$ref": "#/definitions/code_cell"} |
|
64 | 64 | ] |
|
65 | 65 | } |
|
66 | 66 | } |
|
67 | 67 | }, |
|
68 | 68 | |
|
69 | 69 | "definitions": { |
|
70 | 70 | |
|
71 | 71 | "raw_cell": { |
|
72 | 72 | "description": "Notebook raw nbconvert cell.", |
|
73 | 73 | "type": "object", |
|
74 | 74 | "additionalProperties": false, |
|
75 | 75 | "required": ["cell_type", "metadata", "source"], |
|
76 | 76 | "properties": { |
|
77 | 77 | "cell_type": { |
|
78 | 78 | "description": "String identifying the type of cell.", |
|
79 | 79 | "enum": ["raw"] |
|
80 | 80 | }, |
|
81 | 81 | "metadata": { |
|
82 | 82 | "description": "Cell-level metadata.", |
|
83 | 83 | "type": "object", |
|
84 | 84 | "additionalProperties": true, |
|
85 | 85 | "properties": { |
|
86 | 86 | "format": { |
|
87 | 87 | "description": "Raw cell metadata format for nbconvert.", |
|
88 | 88 | "type": "string" |
|
89 | 89 | }, |
|
90 | 90 | "name": {"$ref": "#/definitions/misc/metadata_name"}, |
|
91 | 91 | "tags": {"$ref": "#/definitions/misc/metadata_tags"} |
|
92 | 92 | } |
|
93 | 93 | }, |
|
94 | 94 | "source": {"$ref": "#/definitions/misc/source"} |
|
95 | 95 | } |
|
96 | 96 | }, |
|
97 | 97 | |
|
98 | 98 | "markdown_cell": { |
|
99 | 99 | "description": "Notebook markdown cell.", |
|
100 | 100 | "type": "object", |
|
101 | 101 | "additionalProperties": false, |
|
102 | 102 | "required": ["cell_type", "metadata", "source"], |
|
103 | 103 | "properties": { |
|
104 | 104 | "cell_type": { |
|
105 | 105 | "description": "String identifying the type of cell.", |
|
106 | 106 | "enum": ["markdown"] |
|
107 | 107 | }, |
|
108 | 108 | "metadata": { |
|
109 | 109 | "description": "Cell-level metadata.", |
|
110 | 110 | "type": "object", |
|
111 | 111 | "properties": { |
|
112 | 112 | "name": {"$ref": "#/definitions/misc/metadata_name"}, |
|
113 | 113 | "tags": {"$ref": "#/definitions/misc/metadata_tags"} |
|
114 | 114 | }, |
|
115 | 115 | "additionalProperties": true |
|
116 | 116 | }, |
|
117 | 117 | "source": {"$ref": "#/definitions/misc/source"} |
|
118 | 118 | } |
|
119 | 119 | }, |
|
120 | 120 | |
|
121 | 121 | "heading_cell": { |
|
122 | 122 | "description": "Notebook heading cell.", |
|
123 | 123 | "type": "object", |
|
124 | 124 | "additionalProperties": false, |
|
125 | 125 | "required": ["cell_type", "metadata", "source", "level"], |
|
126 | 126 | "properties": { |
|
127 | 127 | "cell_type": { |
|
128 | 128 | "description": "String identifying the type of cell.", |
|
129 | 129 | "enum": ["heading"] |
|
130 | 130 | }, |
|
131 | 131 | "metadata": { |
|
132 | 132 | "description": "Cell-level metadata.", |
|
133 | 133 | "type": "object", |
|
134 | 134 | "properties": { |
|
135 | 135 | "name": {"$ref": "#/definitions/misc/metadata_name"}, |
|
136 | 136 | "tags": {"$ref": "#/definitions/misc/metadata_tags"} |
|
137 | 137 | }, |
|
138 | 138 | "additionalProperties": true |
|
139 | 139 | }, |
|
140 | 140 | "source": {"$ref": "#/definitions/misc/source"}, |
|
141 | 141 | "level": { |
|
142 | 142 | "description": "Level of heading cells.", |
|
143 | 143 | "type": "integer", |
|
144 |
"minimum": 1 |
|
|
145 | "maximum": 6 | |
|
144 | "minimum": 1 | |
|
146 | 145 | } |
|
147 | 146 | } |
|
148 | 147 | }, |
|
149 | 148 | |
|
150 | 149 | "code_cell": { |
|
151 | 150 | "description": "Notebook code cell.", |
|
152 | 151 | "type": "object", |
|
153 | 152 | "additionalProperties": false, |
|
154 | 153 | "required": ["cell_type", "metadata", "source", "outputs", "prompt_number"], |
|
155 | 154 | "properties": { |
|
156 | 155 | "cell_type": { |
|
157 | 156 | "description": "String identifying the type of cell.", |
|
158 | 157 | "enum": ["code"] |
|
159 | 158 | }, |
|
160 | 159 | "metadata": { |
|
161 | 160 | "description": "Cell-level metadata.", |
|
162 | 161 | "type": "object", |
|
163 | 162 | "additionalProperties": true, |
|
164 | 163 | "properties": { |
|
165 | 164 | "collapsed": { |
|
166 | 165 | "description": "Whether the cell is collapsed/expanded.", |
|
167 | 166 | "type": "boolean" |
|
168 | 167 | }, |
|
169 | 168 | "autoscroll": { |
|
170 | 169 | "description": "Whether the cell's output is scrolled, unscrolled, or autoscrolled.", |
|
171 | 170 | "enum": [true, false, "auto"] |
|
172 | 171 | }, |
|
173 | 172 | "name": {"$ref": "#/definitions/misc/metadata_name"}, |
|
174 | 173 | "tags": {"$ref": "#/definitions/misc/metadata_tags"} |
|
175 | 174 | } |
|
176 | 175 | }, |
|
177 | 176 | "source": {"$ref": "#/definitions/misc/source"}, |
|
178 | 177 | "outputs": { |
|
179 | 178 | "description": "Execution, display, or stream outputs.", |
|
180 | 179 | "type": "array", |
|
181 | 180 | "items": {"$ref": "#/definitions/output"} |
|
182 | 181 | }, |
|
183 | 182 | "prompt_number": { |
|
184 | 183 | "description": "The code cell's prompt number. Will be null if the cell has not been run.", |
|
185 | 184 | "type": ["integer", "null"], |
|
186 | 185 | "minimum": 0 |
|
187 | 186 | } |
|
188 | 187 | } |
|
189 | 188 | }, |
|
190 | 189 | "output": { |
|
191 | 190 | "type": "object", |
|
192 | 191 | "oneOf": [ |
|
193 | 192 | {"$ref": "#/definitions/execute_result"}, |
|
194 | 193 | {"$ref": "#/definitions/display_data"}, |
|
195 | 194 | {"$ref": "#/definitions/stream"}, |
|
196 | 195 | {"$ref": "#/definitions/error"} |
|
197 | 196 | ] |
|
198 | 197 | }, |
|
199 | 198 | "execute_result": { |
|
200 | 199 | "description": "Result of executing a code cell.", |
|
201 | 200 | "type": "object", |
|
202 | 201 | "additionalProperties": false, |
|
203 | 202 | "required": ["output_type", "metadata", "prompt_number"], |
|
204 | 203 | "properties": { |
|
205 | 204 | "output_type": { |
|
206 | 205 | "description": "Type of cell output.", |
|
207 | 206 | "enum": ["execute_result"] |
|
208 | 207 | }, |
|
209 | 208 | "prompt_number": { |
|
210 | 209 | "description": "A result's prompt number.", |
|
211 | 210 | "type": ["integer"], |
|
212 | 211 | "minimum": 0 |
|
213 | 212 | }, |
|
214 | 213 | "application/json": { |
|
215 | 214 | "type": "object" |
|
216 | 215 | }, |
|
217 | 216 | "metadata": {"$ref": "#/definitions/misc/output_metadata"} |
|
218 | 217 | }, |
|
219 | 218 | "patternProperties": { |
|
220 | 219 | "^(?!application/json$)[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": { |
|
221 | 220 | "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.", |
|
222 | 221 | "$ref": "#/definitions/misc/multiline_string" |
|
223 | 222 | } |
|
224 | 223 | } |
|
225 | 224 | }, |
|
226 | 225 | |
|
227 | 226 | "display_data": { |
|
228 | 227 | "description": "Data displayed as a result of code cell execution.", |
|
229 | 228 | "type": "object", |
|
230 | 229 | "additionalProperties": false, |
|
231 | 230 | "required": ["output_type", "metadata"], |
|
232 | 231 | "properties": { |
|
233 | 232 | "output_type": { |
|
234 | 233 | "description": "Type of cell output.", |
|
235 | 234 | "enum": ["display_data"] |
|
236 | 235 | }, |
|
237 | 236 | "application/json": { |
|
238 | 237 | "type": "object" |
|
239 | 238 | }, |
|
240 | 239 | "metadata": {"$ref": "#/definitions/misc/output_metadata"} |
|
241 | 240 | }, |
|
242 | 241 | "patternProperties": { |
|
243 | 242 | "^(?!application/json$)[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": { |
|
244 | 243 | "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.", |
|
245 | 244 | "$ref": "#/definitions/misc/multiline_string" |
|
246 | 245 | } |
|
247 | 246 | } |
|
248 | 247 | }, |
|
249 | 248 | |
|
250 | 249 | "stream": { |
|
251 | 250 | "description": "Stream output from a code cell.", |
|
252 | 251 | "type": "object", |
|
253 | 252 | "additionalProperties": false, |
|
254 | 253 | "required": ["output_type", "metadata", "stream", "text"], |
|
255 | 254 | "properties": { |
|
256 | 255 | "output_type": { |
|
257 | 256 | "description": "Type of cell output.", |
|
258 | 257 | "enum": ["stream"] |
|
259 | 258 | }, |
|
260 | 259 | "metadata": {"$ref": "#/definitions/misc/output_metadata"}, |
|
261 | 260 | "stream": { |
|
262 | 261 | "description": "The stream type/destination.", |
|
263 | 262 | "type": "string" |
|
264 | 263 | }, |
|
265 | 264 | "text": { |
|
266 | 265 | "description": "The stream's text output, represented as an array of strings.", |
|
267 | 266 | "$ref": "#/definitions/misc/multiline_string" |
|
268 | 267 | } |
|
269 | 268 | } |
|
270 | 269 | }, |
|
271 | 270 | |
|
272 | 271 | "error": { |
|
273 | 272 | "description": "Output of an error that occurred during code cell execution.", |
|
274 | 273 | "type": "object", |
|
275 | 274 | "additionalProperties": false, |
|
276 | 275 | "required": ["output_type", "metadata", "ename", "evalue", "traceback"], |
|
277 | 276 | "properties": { |
|
278 | 277 | "output_type": { |
|
279 | 278 | "description": "Type of cell output.", |
|
280 | 279 | "enum": ["error"] |
|
281 | 280 | }, |
|
282 | 281 | "metadata": {"$ref": "#/definitions/misc/output_metadata"}, |
|
283 | 282 | "ename": { |
|
284 | 283 | "description": "The name of the error.", |
|
285 | 284 | "type": "string" |
|
286 | 285 | }, |
|
287 | 286 | "evalue": { |
|
288 | 287 | "description": "The value, or message, of the error.", |
|
289 | 288 | "type": "string" |
|
290 | 289 | }, |
|
291 | 290 | "traceback": { |
|
292 | 291 | "description": "The error's traceback, represented as an array of strings.", |
|
293 | 292 | "type": "array", |
|
294 | 293 | "items": {"type": "string"} |
|
295 | 294 | } |
|
296 | 295 | } |
|
297 | 296 | }, |
|
298 | 297 | |
|
299 | 298 | "misc": { |
|
300 | 299 | "metadata_name": { |
|
301 | 300 | "description": "The cell's name. If present, must be a non-empty string.", |
|
302 | 301 | "type": "string", |
|
303 | 302 | "pattern": "^.+$" |
|
304 | 303 | }, |
|
305 | 304 | "metadata_tags": { |
|
306 | 305 | "description": "The cell's tags. Tags must be unique, and must not contain commas.", |
|
307 | 306 | "type": "array", |
|
308 | 307 | "uniqueItems": true, |
|
309 | 308 | "items": { |
|
310 | 309 | "type": "string", |
|
311 | 310 | "pattern": "^[^,]+$" |
|
312 | 311 | } |
|
313 | 312 | }, |
|
314 | 313 | "source": { |
|
315 | 314 | "description": "Contents of the cell, represented as an array of lines.", |
|
316 | 315 | "$ref": "#/definitions/misc/multiline_string" |
|
317 | 316 | }, |
|
318 | 317 | "prompt_number": { |
|
319 | 318 | "description": "The code cell's prompt number. Will be null if the cell has not been run.", |
|
320 | 319 | "type": ["integer", "null"], |
|
321 | 320 | "minimum": 0 |
|
322 | 321 | }, |
|
323 | 322 | "mimetype": { |
|
324 | 323 | "patternProperties": { |
|
325 | 324 | "^[a-zA-Z0-9\\-\\+]+/[a-zA-Z0-9\\-\\+]+": { |
|
326 | 325 | "description": "The cell's mimetype output (e.g. text/plain), represented as either an array of strings or a string.", |
|
327 | 326 | "$ref": "#/definitions/misc/multiline_string" |
|
328 | 327 | } |
|
329 | 328 | } |
|
330 | 329 | }, |
|
331 | 330 | "output_metadata": { |
|
332 | 331 | "description": "Cell output metadata.", |
|
333 | 332 | "type": "object", |
|
334 | 333 | "additionalProperties": true |
|
335 | 334 | }, |
|
336 | 335 | "multiline_string": { |
|
337 | 336 | "oneOf" : [ |
|
338 | 337 | {"type": "string"}, |
|
339 | 338 | { |
|
340 | 339 | "type": "array", |
|
341 | 340 | "items": {"type": "string"} |
|
342 | 341 | } |
|
343 | 342 | ] |
|
344 | 343 | } |
|
345 | 344 | } |
|
346 | 345 | } |
|
347 | 346 | } |
@@ -1,126 +1,130 b'' | |||
|
1 | 1 | """Tests for nbformat validation""" |
|
2 | 2 | |
|
3 | 3 | # Copyright (c) IPython Development Team. |
|
4 | 4 | # Distributed under the terms of the Modified BSD License. |
|
5 | 5 | |
|
6 | 6 | import io |
|
7 | 7 | import os |
|
8 | 8 | |
|
9 | 9 | import nose.tools as nt |
|
10 | 10 | |
|
11 |
from ..validator import |
|
|
11 | from IPython.nbformat.validator import validate, ValidationError | |
|
12 | 12 | from ..nbjson import reads |
|
13 | from ..nbbase import nbformat | |
|
13 | 14 | from ..compose import ( |
|
14 | 15 | new_code_cell, new_heading_cell, new_markdown_cell, new_notebook, |
|
15 | 16 | new_output, new_raw_cell, |
|
16 | 17 | ) |
|
17 | 18 | |
|
19 | def validate4(obj, ref=None): | |
|
20 | return validate(obj, ref, version=nbformat) | |
|
21 | ||
|
18 | 22 | def test_valid_code_cell(): |
|
19 | 23 | cell = new_code_cell() |
|
20 | validate(cell, 'code_cell') | |
|
24 | validate4(cell, 'code_cell') | |
|
21 | 25 | |
|
22 | 26 | def test_invalid_code_cell(): |
|
23 | 27 | cell = new_code_cell() |
|
24 | 28 | |
|
25 | 29 | cell['source'] = 5 |
|
26 | 30 | with nt.assert_raises(ValidationError): |
|
27 | validate(cell, 'code_cell') | |
|
31 | validate4(cell, 'code_cell') | |
|
28 | 32 | |
|
29 | 33 | cell = new_code_cell() |
|
30 | 34 | del cell['metadata'] |
|
31 | 35 | |
|
32 | 36 | with nt.assert_raises(ValidationError): |
|
33 | validate(cell, 'code_cell') | |
|
37 | validate4(cell, 'code_cell') | |
|
34 | 38 | |
|
35 | 39 | cell = new_code_cell() |
|
36 | 40 | del cell['source'] |
|
37 | 41 | |
|
38 | 42 | with nt.assert_raises(ValidationError): |
|
39 | validate(cell, 'code_cell') | |
|
43 | validate4(cell, 'code_cell') | |
|
40 | 44 | |
|
41 | 45 | cell = new_code_cell() |
|
42 | 46 | del cell['cell_type'] |
|
43 | 47 | |
|
44 | 48 | with nt.assert_raises(ValidationError): |
|
45 | validate(cell, 'code_cell') | |
|
49 | validate4(cell, 'code_cell') | |
|
46 | 50 | |
|
47 | 51 | def test_invalid_markdown_cell(): |
|
48 | 52 | cell = new_markdown_cell() |
|
49 | 53 | |
|
50 | 54 | cell['source'] = 5 |
|
51 | 55 | with nt.assert_raises(ValidationError): |
|
52 | validate(cell, 'markdown_cell') | |
|
56 | validate4(cell, 'markdown_cell') | |
|
53 | 57 | |
|
54 | 58 | cell = new_markdown_cell() |
|
55 | 59 | del cell['metadata'] |
|
56 | 60 | |
|
57 | 61 | with nt.assert_raises(ValidationError): |
|
58 | validate(cell, 'markdown_cell') | |
|
62 | validate4(cell, 'markdown_cell') | |
|
59 | 63 | |
|
60 | 64 | cell = new_markdown_cell() |
|
61 | 65 | del cell['source'] |
|
62 | 66 | |
|
63 | 67 | with nt.assert_raises(ValidationError): |
|
64 | validate(cell, 'markdown_cell') | |
|
68 | validate4(cell, 'markdown_cell') | |
|
65 | 69 | |
|
66 | 70 | cell = new_markdown_cell() |
|
67 | 71 | del cell['cell_type'] |
|
68 | 72 | |
|
69 | 73 | with nt.assert_raises(ValidationError): |
|
70 | validate(cell, 'markdown_cell') | |
|
74 | validate4(cell, 'markdown_cell') | |
|
71 | 75 | |
|
72 | 76 | def test_invalid_heading_cell(): |
|
73 | 77 | cell = new_heading_cell() |
|
74 | 78 | |
|
75 | 79 | cell['source'] = 5 |
|
76 | 80 | with nt.assert_raises(ValidationError): |
|
77 | validate(cell, 'heading_cell') | |
|
81 | validate4(cell, 'heading_cell') | |
|
78 | 82 | |
|
79 | 83 | cell = new_heading_cell() |
|
80 | 84 | del cell['metadata'] |
|
81 | 85 | |
|
82 | 86 | with nt.assert_raises(ValidationError): |
|
83 | validate(cell, 'heading_cell') | |
|
87 | validate4(cell, 'heading_cell') | |
|
84 | 88 | |
|
85 | 89 | cell = new_heading_cell() |
|
86 | 90 | del cell['source'] |
|
87 | 91 | |
|
88 | 92 | with nt.assert_raises(ValidationError): |
|
89 | validate(cell, 'heading_cell') | |
|
93 | validate4(cell, 'heading_cell') | |
|
90 | 94 | |
|
91 | 95 | cell = new_heading_cell() |
|
92 | 96 | del cell['cell_type'] |
|
93 | 97 | |
|
94 | 98 | with nt.assert_raises(ValidationError): |
|
95 | validate(cell, 'heading_cell') | |
|
99 | validate4(cell, 'heading_cell') | |
|
96 | 100 | |
|
97 | 101 | def test_invalid_raw_cell(): |
|
98 | 102 | cell = new_raw_cell() |
|
99 | 103 | |
|
100 | 104 | cell['source'] = 5 |
|
101 | 105 | with nt.assert_raises(ValidationError): |
|
102 | validate(cell, 'raw_cell') | |
|
106 | validate4(cell, 'raw_cell') | |
|
103 | 107 | |
|
104 | 108 | cell = new_raw_cell() |
|
105 | 109 | del cell['metadata'] |
|
106 | 110 | |
|
107 | 111 | with nt.assert_raises(ValidationError): |
|
108 | validate(cell, 'raw_cell') | |
|
112 | validate4(cell, 'raw_cell') | |
|
109 | 113 | |
|
110 | 114 | cell = new_raw_cell() |
|
111 | 115 | del cell['source'] |
|
112 | 116 | |
|
113 | 117 | with nt.assert_raises(ValidationError): |
|
114 | validate(cell, 'raw_cell') | |
|
118 | validate4(cell, 'raw_cell') | |
|
115 | 119 | |
|
116 | 120 | cell = new_raw_cell() |
|
117 | 121 | del cell['cell_type'] |
|
118 | 122 | |
|
119 | 123 | with nt.assert_raises(ValidationError): |
|
120 | validate(cell, 'raw_cell') | |
|
124 | validate4(cell, 'raw_cell') | |
|
121 | 125 | |
|
122 | 126 | def test_sample_notebook(): |
|
123 | 127 | here = os.path.dirname(__file__) |
|
124 | 128 | with io.open(os.path.join(here, "v4-test.ipynb"), encoding='utf-8') as f: |
|
125 | 129 | nb = reads(f.read()) |
|
126 | validate(nb) | |
|
130 | validate4(nb) |
@@ -1,736 +1,737 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | This module defines the things that are used in setup.py for building IPython |
|
4 | 4 | |
|
5 | 5 | This includes: |
|
6 | 6 | |
|
7 | 7 | * The basic arguments to setup |
|
8 | 8 | * Functions for finding things like packages, package data, etc. |
|
9 | 9 | * A function for checking dependencies. |
|
10 | 10 | """ |
|
11 | 11 | |
|
12 | 12 | # Copyright (c) IPython Development Team. |
|
13 | 13 | # Distributed under the terms of the Modified BSD License. |
|
14 | 14 | |
|
15 | 15 | from __future__ import print_function |
|
16 | 16 | |
|
17 | 17 | import errno |
|
18 | 18 | import os |
|
19 | 19 | import sys |
|
20 | 20 | |
|
21 | 21 | from distutils import log |
|
22 | 22 | from distutils.command.build_py import build_py |
|
23 | 23 | from distutils.command.build_scripts import build_scripts |
|
24 | 24 | from distutils.command.install import install |
|
25 | 25 | from distutils.command.install_scripts import install_scripts |
|
26 | 26 | from distutils.cmd import Command |
|
27 | 27 | from fnmatch import fnmatch |
|
28 | 28 | from glob import glob |
|
29 | 29 | from subprocess import check_call |
|
30 | 30 | |
|
31 | 31 | from setupext import install_data_ext |
|
32 | 32 | |
|
33 | 33 | #------------------------------------------------------------------------------- |
|
34 | 34 | # Useful globals and utility functions |
|
35 | 35 | #------------------------------------------------------------------------------- |
|
36 | 36 | |
|
37 | 37 | # A few handy globals |
|
38 | 38 | isfile = os.path.isfile |
|
39 | 39 | pjoin = os.path.join |
|
40 | 40 | repo_root = os.path.dirname(os.path.abspath(__file__)) |
|
41 | 41 | |
|
42 | 42 | def oscmd(s): |
|
43 | 43 | print(">", s) |
|
44 | 44 | os.system(s) |
|
45 | 45 | |
|
46 | 46 | # Py3 compatibility hacks, without assuming IPython itself is installed with |
|
47 | 47 | # the full py3compat machinery. |
|
48 | 48 | |
|
49 | 49 | try: |
|
50 | 50 | execfile |
|
51 | 51 | except NameError: |
|
52 | 52 | def execfile(fname, globs, locs=None): |
|
53 | 53 | locs = locs or globs |
|
54 | 54 | exec(compile(open(fname).read(), fname, "exec"), globs, locs) |
|
55 | 55 | |
|
56 | 56 | # A little utility we'll need below, since glob() does NOT allow you to do |
|
57 | 57 | # exclusion on multiple endings! |
|
58 | 58 | def file_doesnt_endwith(test,endings): |
|
59 | 59 | """Return true if test is a file and its name does NOT end with any |
|
60 | 60 | of the strings listed in endings.""" |
|
61 | 61 | if not isfile(test): |
|
62 | 62 | return False |
|
63 | 63 | for e in endings: |
|
64 | 64 | if test.endswith(e): |
|
65 | 65 | return False |
|
66 | 66 | return True |
|
67 | 67 | |
|
68 | 68 | #--------------------------------------------------------------------------- |
|
69 | 69 | # Basic project information |
|
70 | 70 | #--------------------------------------------------------------------------- |
|
71 | 71 | |
|
72 | 72 | # release.py contains version, authors, license, url, keywords, etc. |
|
73 | 73 | execfile(pjoin(repo_root, 'IPython','core','release.py'), globals()) |
|
74 | 74 | |
|
75 | 75 | # Create a dict with the basic information |
|
76 | 76 | # This dict is eventually passed to setup after additional keys are added. |
|
77 | 77 | setup_args = dict( |
|
78 | 78 | name = name, |
|
79 | 79 | version = version, |
|
80 | 80 | description = description, |
|
81 | 81 | long_description = long_description, |
|
82 | 82 | author = author, |
|
83 | 83 | author_email = author_email, |
|
84 | 84 | url = url, |
|
85 | 85 | download_url = download_url, |
|
86 | 86 | license = license, |
|
87 | 87 | platforms = platforms, |
|
88 | 88 | keywords = keywords, |
|
89 | 89 | classifiers = classifiers, |
|
90 | 90 | cmdclass = {'install_data': install_data_ext}, |
|
91 | 91 | ) |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | #--------------------------------------------------------------------------- |
|
95 | 95 | # Find packages |
|
96 | 96 | #--------------------------------------------------------------------------- |
|
97 | 97 | |
|
98 | 98 | def find_packages(): |
|
99 | 99 | """ |
|
100 | 100 | Find all of IPython's packages. |
|
101 | 101 | """ |
|
102 | 102 | excludes = ['deathrow', 'quarantine'] |
|
103 | 103 | packages = [] |
|
104 | 104 | for dir,subdirs,files in os.walk('IPython'): |
|
105 | 105 | package = dir.replace(os.path.sep, '.') |
|
106 | 106 | if any(package.startswith('IPython.'+exc) for exc in excludes): |
|
107 | 107 | # package is to be excluded (e.g. deathrow) |
|
108 | 108 | continue |
|
109 | 109 | if '__init__.py' not in files: |
|
110 | 110 | # not a package |
|
111 | 111 | continue |
|
112 | 112 | packages.append(package) |
|
113 | 113 | return packages |
|
114 | 114 | |
|
115 | 115 | #--------------------------------------------------------------------------- |
|
116 | 116 | # Find package data |
|
117 | 117 | #--------------------------------------------------------------------------- |
|
118 | 118 | |
|
119 | 119 | def find_package_data(): |
|
120 | 120 | """ |
|
121 | 121 | Find IPython's package_data. |
|
122 | 122 | """ |
|
123 | 123 | # This is not enough for these things to appear in an sdist. |
|
124 | 124 | # We need to muck with the MANIFEST to get this to work |
|
125 | 125 | |
|
126 | 126 | # exclude components and less from the walk; |
|
127 | 127 | # we will build the components separately |
|
128 | 128 | excludes = [ |
|
129 | 129 | pjoin('static', 'components'), |
|
130 | 130 | pjoin('static', '*', 'less'), |
|
131 | 131 | ] |
|
132 | 132 | |
|
133 | 133 | # walk notebook resources: |
|
134 | 134 | cwd = os.getcwd() |
|
135 | 135 | os.chdir(os.path.join('IPython', 'html')) |
|
136 | 136 | static_data = [] |
|
137 | 137 | for parent, dirs, files in os.walk('static'): |
|
138 | 138 | if any(fnmatch(parent, pat) for pat in excludes): |
|
139 | 139 | # prevent descending into subdirs |
|
140 | 140 | dirs[:] = [] |
|
141 | 141 | continue |
|
142 | 142 | for f in files: |
|
143 | 143 | static_data.append(pjoin(parent, f)) |
|
144 | 144 | |
|
145 | 145 | components = pjoin("static", "components") |
|
146 | 146 | # select the components we actually need to install |
|
147 | 147 | # (there are lots of resources we bundle for sdist-reasons that we don't actually use) |
|
148 | 148 | static_data.extend([ |
|
149 | 149 | pjoin(components, "backbone", "backbone-min.js"), |
|
150 | 150 | pjoin(components, "bootstrap", "js", "bootstrap.min.js"), |
|
151 | 151 | pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"), |
|
152 | 152 | pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"), |
|
153 | 153 | pjoin(components, "font-awesome", "fonts", "*.*"), |
|
154 | 154 | pjoin(components, "google-caja", "html-css-sanitizer-minified.js"), |
|
155 | 155 | pjoin(components, "highlight.js", "build", "highlight.pack.js"), |
|
156 | 156 | pjoin(components, "jquery", "jquery.min.js"), |
|
157 | 157 | pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"), |
|
158 | 158 | pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"), |
|
159 | 159 | pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"), |
|
160 | 160 | pjoin(components, "marked", "lib", "marked.js"), |
|
161 | 161 | pjoin(components, "requirejs", "require.js"), |
|
162 | 162 | pjoin(components, "underscore", "underscore-min.js"), |
|
163 | 163 | pjoin(components, "moment", "moment.js"), |
|
164 | 164 | pjoin(components, "moment", "min", "moment.min.js"), |
|
165 | 165 | pjoin(components, "term.js", "src", "term.js"), |
|
166 | 166 | pjoin(components, "text-encoding", "lib", "encoding.js"), |
|
167 | 167 | ]) |
|
168 | 168 | |
|
169 | 169 | # Ship all of Codemirror's CSS and JS |
|
170 | 170 | for parent, dirs, files in os.walk(pjoin(components, 'codemirror')): |
|
171 | 171 | for f in files: |
|
172 | 172 | if f.endswith(('.js', '.css')): |
|
173 | 173 | static_data.append(pjoin(parent, f)) |
|
174 | 174 | |
|
175 | 175 | os.chdir(os.path.join('tests',)) |
|
176 | 176 | js_tests = glob('*.js') + glob('*/*.js') |
|
177 | 177 | |
|
178 | 178 | os.chdir(os.path.join(cwd, 'IPython', 'nbconvert')) |
|
179 | 179 | nbconvert_templates = [os.path.join(dirpath, '*.*') |
|
180 | 180 | for dirpath, _, _ in os.walk('templates')] |
|
181 | 181 | |
|
182 | 182 | os.chdir(cwd) |
|
183 | 183 | |
|
184 | 184 | package_data = { |
|
185 | 185 | 'IPython.config.profile' : ['README*', '*/*.py'], |
|
186 | 186 | 'IPython.core.tests' : ['*.png', '*.jpg'], |
|
187 | 187 | 'IPython.lib.tests' : ['*.wav'], |
|
188 | 188 | 'IPython.testing.plugin' : ['*.txt'], |
|
189 | 189 | 'IPython.html' : ['templates/*'] + static_data, |
|
190 | 190 | 'IPython.html.tests' : js_tests, |
|
191 | 191 | 'IPython.qt.console' : ['resources/icon/*.svg'], |
|
192 | 192 | 'IPython.nbconvert' : nbconvert_templates + |
|
193 | 193 | [ |
|
194 | 194 | 'tests/files/*.*', |
|
195 | 195 | 'exporters/tests/files/*.*', |
|
196 | 196 | 'preprocessors/tests/files/*.*', |
|
197 | 197 | ], |
|
198 | 198 | 'IPython.nbconvert.filters' : ['marked.js'], |
|
199 | 199 | 'IPython.nbformat' : [ |
|
200 | 200 | 'tests/*.ipynb', |
|
201 | 201 | 'v3/nbformat.v3.schema.json', |
|
202 | 'v4/nbformat.v4.schema.json', | |
|
202 | 203 | ] |
|
203 | 204 | } |
|
204 | 205 | |
|
205 | 206 | return package_data |
|
206 | 207 | |
|
207 | 208 | |
|
208 | 209 | def check_package_data(package_data): |
|
209 | 210 | """verify that package_data globs make sense""" |
|
210 | 211 | print("checking package data") |
|
211 | 212 | for pkg, data in package_data.items(): |
|
212 | 213 | pkg_root = pjoin(*pkg.split('.')) |
|
213 | 214 | for d in data: |
|
214 | 215 | path = pjoin(pkg_root, d) |
|
215 | 216 | if '*' in path: |
|
216 | 217 | assert len(glob(path)) > 0, "No files match pattern %s" % path |
|
217 | 218 | else: |
|
218 | 219 | assert os.path.exists(path), "Missing package data: %s" % path |
|
219 | 220 | |
|
220 | 221 | |
|
221 | 222 | def check_package_data_first(command): |
|
222 | 223 | """decorator for checking package_data before running a given command |
|
223 | 224 | |
|
224 | 225 | Probably only needs to wrap build_py |
|
225 | 226 | """ |
|
226 | 227 | class DecoratedCommand(command): |
|
227 | 228 | def run(self): |
|
228 | 229 | check_package_data(self.package_data) |
|
229 | 230 | command.run(self) |
|
230 | 231 | return DecoratedCommand |
|
231 | 232 | |
|
232 | 233 | |
|
233 | 234 | #--------------------------------------------------------------------------- |
|
234 | 235 | # Find data files |
|
235 | 236 | #--------------------------------------------------------------------------- |
|
236 | 237 | |
|
def make_dir_struct(tag, base, out_base):
    """Map the directory tree below *base* onto *out_base*.

    Walks every directory under ``base`` and returns a list of
    ``(install_path, [source_files])`` pairs suitable for use as a
    ``data_files`` argument, where each install path is the directory's
    location relative to ``base`` re-anchored at ``out_base``.
    ``tag`` is unused; kept for interface compatibility.
    """
    sep = os.path.sep
    prefix_len = len(base)
    entries = []
    for dirpath, _dirnames, filenames in os.walk(base):
        # Strip the base prefix, including any leading separator, because
        # pjoin('foo/', '/bar') would otherwise return '/bar' instead of
        # the anchored output path we want.
        relative = dirpath[prefix_len:]
        if relative.startswith(sep):
            relative = relative[len(sep):]
        # os.walk yields bare filenames, so rebuild full source paths by
        # joining back with dirpath; anchor the output at out_base.
        entries.append((pjoin(out_base, relative),
                        [pjoin(dirpath, name) for name in filenames]))
    return entries
|
272 | 273 | |
|
273 | 274 | |
|
def find_data_files():
    """Find IPython's data_files.

    Just man pages at this point.  Returns a list with a single
    ``(install_dir, [manpage files])`` entry.
    """
    man_src = pjoin('docs', 'man')
    # Release tarballs ship gzipped manpages; a source checkout only has
    # the plain .1 files, so fall back to those when no .1.gz is found.
    manpages = [f for f in glob(pjoin(man_src, '*.1.gz')) if isfile(f)]
    if not manpages:
        manpages = [f for f in glob(pjoin(man_src, '*.1')) if isfile(f)]
    return [(pjoin('share', 'man', 'man1'), manpages)]
|
293 | 294 | |
|
294 | 295 | |
|
def make_man_update_target(manpage):
    """Return a target_update-compliant tuple for the given manpage.

    Parameters
    ----------
    manpage : string
        Name of the manpage, must include the section number (trailing number).

    Example
    -------

    >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
    ('docs/man/ipython.1.gz',
     ['docs/man/ipython.1'],
     'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
    """
    man_dir = pjoin('docs', 'man')
    manpage_gz = manpage + '.gz'
    # (target, [dependencies], command-to-regenerate-target)
    gz_cmd = ("cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s"
              % locals())
    return (pjoin(man_dir, manpage_gz), [pjoin(man_dir, manpage)], gz_cmd)
|
318 | 319 | |
|
319 | 320 | # The two functions below are copied from IPython.utils.path, so we don't need |
|
320 | 321 | # to import IPython during setup, which fails on Python 3. |
|
321 | 322 | |
|
def target_outdated(target, deps):
    """Determine whether a target is out of date.

    target_outdated(target,deps) -> 1/0

    deps: list of filenames which MUST exist.
    target: single filename which may or may not exist.

    If target doesn't exist or is older than any file listed in deps, return
    true, otherwise return false.
    """
    try:
        target_time = os.path.getmtime(target)
    except os.error:
        # A missing target is always stale.
        return 1
    # Stale as soon as any dependency is newer than the target.
    return int(any(os.path.getmtime(dep) > target_time for dep in deps))
|
344 | 345 | |
|
345 | 346 | |
|
def target_update(target, deps, cmd):
    """Update a target with a given command given a list of dependencies.

    target_update(target,deps,cmd) -> runs cmd if target is outdated.

    This is just a wrapper around target_outdated() which calls the given
    command if target is outdated."""
    if not target_outdated(target, deps):
        return
    os.system(cmd)
|
356 | 357 | |
|
357 | 358 | #--------------------------------------------------------------------------- |
|
358 | 359 | # Find scripts |
|
359 | 360 | #--------------------------------------------------------------------------- |
|
360 | 361 | |
|
def find_entry_points():
    """Find IPython's scripts.

    Returns entry_point-style specifications of the form
    ``name = module:function``.  Each script is emitted twice: once with
    a bare name ("ipython") and once suffixed with the running Python's
    major version ("ipython3"), so both flavors get installed.
    """
    templates = [
        'ipython%s = IPython:start_ipython',
        'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
        'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
        'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
        'iptest%s = IPython.testing.iptestcontroller:main',
    ]
    major = str(sys.version_info[0])
    # All bare names first, then all version-suffixed names.
    return [t % suffix for suffix in ('', major) for t in templates]
|
381 | 382 | |
|
# Template for the stub scripts written by build_scripts_entrypt: each stub
# imports its entry-point function and calls it.  Placeholders are filled
# via str.format (executable, mod, func).
script_src = """#!{executable}
# This script was automatically generated by setup.py
if __name__ == '__main__':
    from {mod} import {func}
    {func}()
"""
|
388 | 389 | |
|
class build_scripts_entrypt(build_scripts):
    """Build plain stub scripts for each entry point.

    Writes one small launcher script per entry point returned by
    find_entry_points() into the build directory.
    """

    def run(self):
        self.mkpath(self.build_dir)
        written = []
        for spec in find_entry_points():
            # spec looks like "name = package.module:function"
            name, _, target = (part.strip() for part in spec.partition('='))
            path = os.path.join(self.build_dir, name)
            written.append(path)
            print('Writing script to', path)

            mod, _, func = target.partition(':')
            with open(path, 'w') as stub:
                stub.write(script_src.format(executable=sys.executable,
                                             mod=mod, func=func))

        return written, written
|
407 | 408 | |
|
class install_lib_symlink(Command):
    """Symlink the IPython source tree into the install location."""

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        # Inherit the target directory from the parent 'symlink' command.
        self.set_undefined_options('symlink',
                                   ('install_lib', 'install_dir'),
                                   )

    def run(self):
        if sys.platform == 'win32':
            raise Exception("This doesn't work on Windows.")
        source = os.path.join(os.getcwd(), 'IPython')
        dest = os.path.join(self.install_dir, 'IPython')
        # Replace any stale symlink before creating the new one.
        if os.path.islink(dest):
            print('removing existing symlink at %s' % dest)
            os.unlink(dest)
        print('symlinking %s -> %s' % (source, dest))
        os.symlink(source, dest)
|
431 | 432 | |
|
class unsymlink(install):
    """Remove the IPython symlink created by install_lib_symlink, if any."""

    def run(self):
        dest = os.path.join(self.install_lib, 'IPython')
        if not os.path.islink(dest):
            print('No symlink exists at %s' % dest)
        else:
            print('removing symlink at %s' % dest)
            os.unlink(dest)
|
440 | 441 | |
|
class install_symlinked(install):
    """Install IPython by symlinking rather than copying (development use)."""

    def run(self):
        if sys.platform == 'win32':
            raise Exception("This doesn't work on Windows.")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # 'sub_commands': a list of commands this command might have to run to
    # get its work done.  See cmd.py for more info.
    sub_commands = [
        ('install_lib_symlink', lambda self: True),
        ('install_scripts_sym', lambda self: True),
    ]
|
455 | 456 | |
|
class install_scripts_for_symlink(install_scripts):
    """Redefined to get options from 'symlink' instead of 'install'.

    I love distutils almost as much as I love setuptools.
    """

    def finalize_options(self):
        # Pull build_dir from 'build', everything else from 'symlink'.
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options(
            'symlink',
            ('install_scripts', 'install_dir'),
            ('force', 'force'),
            ('skip_build', 'skip_build'),
        )
|
468 | 469 | |
|
469 | 470 | #--------------------------------------------------------------------------- |
|
470 | 471 | # Verify all dependencies |
|
471 | 472 | #--------------------------------------------------------------------------- |
|
472 | 473 | |
|
def check_for_dependencies():
    """Check for IPython's dependencies.

    This function should NOT be called if running under setuptools!
    """
    from setupext.setupext import (
        print_line, print_raw, print_status,
        check_for_sphinx, check_for_pygments,
        check_for_nose, check_for_pexpect,
        check_for_pyzmq, check_for_readline,
        check_for_jinja2, check_for_tornado
    )
    # Header: interpreter and platform summary.
    print_line()
    print_raw("BUILDING IPYTHON")
    print_status('python', sys.version)
    print_status('platform', sys.platform)
    if sys.platform == 'win32':
        print_status('Windows version', sys.getwindowsversion())

    print_raw("")
    print_raw("OPTIONAL DEPENDENCIES")

    # Probe each optional dependency in turn; pexpect is POSIX-only.
    for probe in (check_for_sphinx, check_for_pygments, check_for_nose):
        probe()
    if os.name == 'posix':
        check_for_pexpect()
    for probe in (check_for_pyzmq, check_for_tornado,
                  check_for_readline, check_for_jinja2):
        probe()
|
504 | 505 | |
|
505 | 506 | #--------------------------------------------------------------------------- |
|
506 | 507 | # VCS related |
|
507 | 508 | #--------------------------------------------------------------------------- |
|
508 | 509 | |
|
# utils.submodule has checks for submodule status
# NOTE(review): this pulls check_submodule_status() and friends into our
# globals without importing the IPython package itself; execfile is
# presumably a py2/py3 compatibility helper defined earlier in this
# file — confirm.
execfile(pjoin('IPython','utils','submodule.py'), globals())
|
511 | 512 | |
|
class UpdateSubmodules(Command):
    """Update git submodules

    IPython's external javascript dependencies live in a separate repo.
    """
    description = "Update git submodules"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        try:
            self.spawn('git submodule init'.split())
            self.spawn('git submodule update --recursive'.split())
        except Exception as e:
            # Report the error but fall through: the status check below is
            # what actually decides success or failure.
            print(e)

        if check_submodule_status(repo_root) != 'clean':
            print("submodules could not be checked out")
            sys.exit(1)
|
538 | 539 | |
|
539 | 540 | |
|
def git_prebuild(pkg_dir, build_cmd=build_py):
    """Return extended build or sdist command class for recording commit

    records git commit in IPython.utils._sysinfo.commit

    for use in IPython.utils.sysinfo.sys_info() calls after installation.

    Also ensures that submodules exist prior to running
    """

    class MyBuildPy(build_cmd):
        ''' Subclass to write commit data into installation tree '''
        def run(self):
            build_cmd.run(self)
            # this one will only fire for build commands
            if hasattr(self, 'build_lib'):
                self._record_commit(self.build_lib)

        def make_release_tree(self, base_dir, files):
            # this one will fire for sdist
            build_cmd.make_release_tree(self, base_dir, files)
            self._record_commit(base_dir)

        def _record_commit(self, base_dir):
            # Write the current short git hash into
            # <base_dir>/<pkg_dir>/utils/_sysinfo.py for later runtime use.
            import subprocess
            # shell=True because the command is given as a single string;
            # if git fails, stdout is empty and repo_commit ends up ''.
            proc = subprocess.Popen('git rev-parse --short HEAD',
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)
            repo_commit, _ = proc.communicate()
            repo_commit = repo_commit.strip().decode("ascii")

            out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
            if os.path.isfile(out_pth) and not repo_commit:
                # nothing to write, don't clobber
                return

            print("writing git commit '%s' to %s" % (repo_commit, out_pth))

            # remove to avoid overwriting original via hard link
            try:
                os.remove(out_pth)
            except (IOError, OSError):
                pass
            with open(out_pth, 'w') as out_file:
                out_file.writelines([
                    '# GENERATED BY setup.py\n',
                    'commit = u"%s"\n' % repo_commit,
                ])
    # Wrap so the command refuses to run with missing submodules.
    return require_submodules(MyBuildPy)
|
590 | 591 | |
|
591 | 592 | |
|
def require_submodules(command):
    """decorator for instructing a command to check for submodules before running"""
    class DecoratedCommand(command):
        def run(self):
            # Refuse to proceed until the submodule checkout is clean.
            if check_submodule_status(repo_root) != 'clean':
                print("submodules missing! Run `setup.py submodule` and try again")
                sys.exit(1)
            command.run(self)

    return DecoratedCommand
|
601 | 602 | |
|
602 | 603 | #--------------------------------------------------------------------------- |
|
603 | 604 | # bdist related |
|
604 | 605 | #--------------------------------------------------------------------------- |
|
605 | 606 | |
|
def get_bdist_wheel():
    """Construct bdist_wheel command for building wheels

    Constructs py2-none-any tag, instead of py2.7-none-any

    Returns a command class: a dummy that errors out when setuptools or the
    wheel package is unavailable, otherwise a bdist_wheel subclass that
    rewrites platform-dependent requirements into conditional ones.
    """
    class RequiresWheel(Command):
        # Placeholder command used when wheel support is missing; running
        # it explains the problem and aborts.
        description = "Dummy command for missing bdist_wheel"
        user_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            print("bdist_wheel requires the wheel package")
            sys.exit(1)

    if 'setuptools' not in sys.modules:
        return RequiresWheel
    try:
        from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
    except ImportError:
        return RequiresWheel

    class bdist_wheel_tag(bdist_wheel):

        def add_requirements(self, metadata_path):
            """transform platform-dependent requirements"""
            pkg_info = read_pkg_info(metadata_path)
            # pkg_info is an email.Message object (?!)
            # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
            # and transform them to conditionals.
            # get_all returns None when no Requires-Dist headers exist, so
            # guard with an empty list to avoid a TypeError below.
            requires = pkg_info.get_all('Requires-Dist') or []
            del pkg_info['Requires-Dist']

            def _remove_startswith(lis, prefix):
                """like list.remove, but with startswith instead of =="""
                found = False
                for idx, item in enumerate(lis):
                    if item.startswith(prefix):
                        found = True
                        break
                if found:
                    lis.pop(idx)

            # Strip the unconditional entries, then re-add them with
            # environment markers scoping them to the right platforms.
            for pkg in ("gnureadline", "pyreadline", "mock"):
                _remove_startswith(requires, pkg)
            requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
            requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
            requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
            requires.append("mock; extra == 'test' and python_version < '3.3'")
            for r in requires:
                pkg_info['Requires-Dist'] = r
            write_pkg_info(metadata_path, pkg_info)

    return bdist_wheel_tag
|
664 | 665 | |
|
665 | 666 | #--------------------------------------------------------------------------- |
|
666 | 667 | # Notebook related |
|
667 | 668 | #--------------------------------------------------------------------------- |
|
668 | 669 | |
|
class CompileCSS(Command):
    """Recompile Notebook CSS

    Regenerate the compiled CSS from LESS sources.

    Requires various dev dependencies, such as invoke and lessc.
    """
    description = "Recompile Notebook CSS"
    user_options = [
        ('minify', 'x', "minify CSS"),
        ('force', 'f', "force recompilation of CSS"),
    ]

    def initialize_options(self):
        self.minify = self.force = False

    def finalize_options(self):
        # Normalize option values to plain booleans.
        self.minify = bool(self.minify)
        self.force = bool(self.force)

    def run(self):
        # Delegate the actual build to the 'invoke css' task, passing our
        # options through as flags.
        args = ['invoke', 'css']
        args.extend(flag for flag, enabled
                    in (('--minify', self.minify), ('--force', self.force))
                    if enabled)
        check_call(args, cwd=pjoin(repo_root, "IPython", "html"))
|
697 | 698 | |
|
698 | 699 | |
|
class JavascriptVersion(Command):
    """write the javascript version to notebook javascript"""
    description = "Write IPython version to javascript"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js",
                       "namespace.js")
        with open(nsfile) as src:
            original = src.readlines()
        # Rewrite the file in place, replacing only the version line.
        with open(nsfile, 'w') as dst:
            for line in original:
                if line.startswith("IPython.version"):
                    dst.write('IPython.version = "{0}";\n'.format(version))
                else:
                    dst.write(line)
|
719 | 720 | |
|
720 | 721 | |
|
def css_js_prerelease(command, strict=True):
    """decorator for building js/minified css prior to a release

    Wraps *command* so that the 'jsversion' and minified 'css' commands run
    first.  When *strict* is false, css build failures are only logged.
    """
    class DecoratedCommand(command):
        def run(self):
            dist = self.distribution
            dist.run_command('jsversion')
            dist.get_command_obj('css').minify = True
            try:
                dist.run_command('css')
            except Exception as e:
                if not strict:
                    log.warn("Failed to build css sourcemaps: %s" % e)
                else:
                    raise
            command.run(self)

    return DecoratedCommand
|
1 | NO CONTENT: file was removed |
General Comments 0
You need to be logged in to leave comments.
Login now