Use Draft4 JSON Schema for both v3 and v4...
MinRK
@@ -0,0 +1,363 b''
1 {
2 "$schema": "http://json-schema.org/draft-04/schema#",
3 "description": "IPython Notebook v3.0 JSON schema.",
4 "type": "object",
5 "additionalProperties": false,
6 "required": ["metadata", "nbformat_minor", "nbformat", "worksheets"],
7 "properties": {
8 "metadata": {
9 "description": "Notebook root-level metadata.",
10 "type": "object",
11 "additionalProperties": true,
12 "properties": {
13 "kernel_info": {
14 "description": "Kernel information.",
15 "type": "object",
16 "required": ["name", "language"],
17 "properties": {
18 "name": {
19 "description": "Name of the kernel specification.",
20 "type": "string"
21 },
22 "language": {
23 "description": "The programming language which this kernel runs.",
24 "type": "string"
25 },
26 "codemirror_mode": {
27 "description": "The codemirror mode to use for code in this language.",
28 "type": "string"
29 }
30 }
31 },
32 "signature": {
33 "description": "Hash of the notebook.",
34 "type": "string"
35 }
36 }
37 },
38 "nbformat_minor": {
39 "description": "Notebook format (minor number). Incremented for backward compatible changes to the notebook format.",
40 "type": "integer",
41 "minimum": 0
42 },
43 "nbformat": {
44 "description": "Notebook format (major number). Incremented between backwards incompatible changes to the notebook format.",
45 "type": "integer",
46 "minimum": 3,
47 "maximum": 3
48 },
49 "orig_nbformat": {
50 "description": "Original notebook format (major number) before converting the notebook between versions.",
51 "type": "integer",
52 "minimum": 1
53 },
54 "worksheets" : {
55 "description": "Array of worksheets",
56 "type": "array",
57 "items": {"$ref": "#/definitions/worksheet"}
58 }
59 },
60
61 "definitions": {
62 "worksheet": {
63 "additionalProperties": false,
64 "required" : ["cells"],
65 "properties":{
66 "cells": {
67 "description": "Array of cells of the current notebook.",
68 "type": "array",
69 "items": {
70 "type": "object",
71 "oneOf": [
72 {"$ref": "#/definitions/raw_cell"},
73 {"$ref": "#/definitions/markdown_cell"},
74 {"$ref": "#/definitions/heading_cell"},
75 {"$ref": "#/definitions/code_cell"}
76 ]
77 }
78 },
79 "metadata": {
80 "type": "object",
81 "description": "metadata of the current worksheet"
82 }
83 }
84 },
85 "raw_cell": {
86 "description": "Notebook raw nbconvert cell.",
87 "type": "object",
88 "additionalProperties": false,
89 "required": ["cell_type", "source"],
90 "properties": {
91 "cell_type": {
92 "description": "String identifying the type of cell.",
93 "enum": ["raw"]
94 },
95 "metadata": {
96 "description": "Cell-level metadata.",
97 "type": "object",
98 "additionalProperties": true,
99 "properties": {
100 "format": {
101 "description": "Raw cell metadata format for nbconvert.",
102 "type": "string"
103 },
104 "name": {"$ref": "#/definitions/misc/metadata_name"},
105 "tags": {"$ref": "#/definitions/misc/metadata_tags"}
106 }
107 },
108 "source": {"$ref": "#/definitions/misc/source"}
109 }
110 },
111
112 "markdown_cell": {
113 "description": "Notebook markdown cell.",
114 "type": "object",
115 "additionalProperties": false,
116 "required": ["cell_type", "source"],
117 "properties": {
118 "cell_type": {
119 "description": "String identifying the type of cell.",
120 "enum": ["markdown"]
121 },
122 "metadata": {
123 "description": "Cell-level metadata.",
124 "type": "object",
125 "properties": {
126 "name": {"$ref": "#/definitions/misc/metadata_name"},
127 "tags": {"$ref": "#/definitions/misc/metadata_tags"}
128 },
129 "additionalProperties": true
130 },
131 "source": {"$ref": "#/definitions/misc/source"}
132 }
133 },
134
135 "heading_cell": {
136 "description": "Notebook heading cell.",
137 "type": "object",
138 "additionalProperties": false,
139 "required": ["cell_type", "source", "level"],
140 "properties": {
141 "cell_type": {
142 "description": "String identifying the type of cell.",
143 "enum": ["heading"]
144 },
145 "metadata": {
146 "description": "Cell-level metadata.",
147 "type": "object",
148 "additionalProperties": true
149 },
150 "source": {"$ref": "#/definitions/misc/source"},
151 "level": {
152 "description": "Level of heading cells.",
153 "type": "integer",
154 "minimum": 1
155 }
156 }
157 },
158
159 "code_cell": {
160 "description": "Notebook code cell.",
161 "type": "object",
162 "additionalProperties": false,
163 "required": ["cell_type", "input", "outputs", "collapsed", "language"],
164 "properties": {
165 "cell_type": {
166 "description": "String identifying the type of cell.",
167 "enum": ["code"]
168 },
169 "language": {
170 "description": "The cell's language (always Python)",
171 "type": "string"
172 },
173 "collapsed": {
174 "description": "Whether the cell is collapsed/expanded.",
175 "type": "boolean"
176 },
177 "metadata": {
178 "description": "Cell-level metadata.",
179 "type": "object",
180 "additionalProperties": true
181 },
182 "input": {"$ref": "#/definitions/misc/source"},
183 "outputs": {
184 "description": "Execution, display, or stream outputs.",
185 "type": "array",
186 "items": {"$ref": "#/definitions/output"}
187 },
188 "prompt_number": {
189 "description": "The code cell's prompt number. Will be null if the cell has not been run.",
190 "type": ["integer", "null"],
191 "minimum": 0
192 }
193 }
194 },
195 "output": {
196 "type": "object",
197 "oneOf": [
198 {"$ref": "#/definitions/pyout"},
199 {"$ref": "#/definitions/display_data"},
200 {"$ref": "#/definitions/stream"},
201 {"$ref": "#/definitions/pyerr"}
202 ]
203 },
204 "pyout": {
205 "description": "Result of executing a code cell.",
206 "type": "object",
207 "additionalProperties": false,
208 "required": ["output_type", "prompt_number"],
209 "properties": {
210 "output_type": {
211 "description": "Type of cell output.",
212 "enum": ["pyout"]
213 },
214 "prompt_number": {
215 "description": "A result's prompt number.",
216 "type": ["integer"],
217 "minimum": 0
218 },
219 "text": {"$ref": "#/definitions/misc/multiline_string"},
220 "latex": {"$ref": "#/definitions/misc/multiline_string"},
221 "png": {"$ref": "#/definitions/misc/multiline_string"},
222 "jpeg": {"$ref": "#/definitions/misc/multiline_string"},
223 "svg": {"$ref": "#/definitions/misc/multiline_string"},
224 "html": {"$ref": "#/definitions/misc/multiline_string"},
225 "javascript": {"$ref": "#/definitions/misc/multiline_string"},
226 "json": {"$ref": "#/definitions/misc/multiline_string"},
227 "pdf": {"$ref": "#/definitions/misc/multiline_string"},
228 "metadata": {"$ref": "#/definitions/misc/output_metadata"}
229 },
230 "patternProperties": {
231 "^[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
232 "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
233 "$ref": "#/definitions/misc/multiline_string"
234 }
235 }
236 },
237
238 "display_data": {
239 "description": "Data displayed as a result of code cell execution.",
240 "type": "object",
241 "additionalProperties": false,
242 "required": ["output_type"],
243 "properties": {
244 "output_type": {
245 "description": "Type of cell output.",
246 "enum": ["display_data"]
247 },
248 "text": {"$ref": "#/definitions/misc/multiline_string"},
249 "latex": {"$ref": "#/definitions/misc/multiline_string"},
250 "png": {"$ref": "#/definitions/misc/multiline_string"},
251 "jpeg": {"$ref": "#/definitions/misc/multiline_string"},
252 "svg": {"$ref": "#/definitions/misc/multiline_string"},
253 "html": {"$ref": "#/definitions/misc/multiline_string"},
254 "javascript": {"$ref": "#/definitions/misc/multiline_string"},
255 "json": {"$ref": "#/definitions/misc/multiline_string"},
256 "pdf": {"$ref": "#/definitions/misc/multiline_string"},
257 "metadata": {"$ref": "#/definitions/misc/output_metadata"}
258 },
259 "patternProperties": {
260 "[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
261 "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
262 "$ref": "#/definitions/misc/multiline_string"
263 }
264 }
265 },
266
267 "stream": {
268 "description": "Stream output from a code cell.",
269 "type": "object",
270 "additionalProperties": false,
271 "required": ["output_type", "stream", "text"],
272 "properties": {
273 "output_type": {
274 "description": "Type of cell output.",
275 "enum": ["stream"]
276 },
277 "stream": {
278 "description": "The stream type/destination.",
279 "type": "string"
280 },
281 "text": {
282 "description": "The stream's text output, represented as an array of strings.",
283 "$ref": "#/definitions/misc/multiline_string"
284 }
285 }
286 },
287
288 "pyerr": {
289 "description": "Output of an error that occurred during code cell execution.",
290 "type": "object",
291 "additionalProperties": false,
292 "required": ["output_type", "ename", "evalue", "traceback"],
293 "properties": {
294 "output_type": {
295 "description": "Type of cell output.",
296 "enum": ["pyerr"]
297 },
298 "metadata": {"$ref": "#/definitions/misc/output_metadata"},
299 "ename": {
300 "description": "The name of the error.",
301 "type": "string"
302 },
303 "evalue": {
304 "description": "The value, or message, of the error.",
305 "type": "string"
306 },
307 "traceback": {
308 "description": "The error's traceback, represented as an array of strings.",
309 "type": "array",
310 "items": {"type": "string"}
311 }
312 }
313 },
314
315 "misc": {
316 "metadata_name": {
317 "description": "The cell's name. If present, must be a non-empty string.",
318 "type": "string",
319 "pattern": "^.+$"
320 },
321 "metadata_tags": {
322 "description": "The cell's tags. Tags must be unique, and must not contain commas.",
323 "type": "array",
324 "uniqueItems": true,
325 "items": {
326 "type": "string",
327 "pattern": "^[^,]+$"
328 }
329 },
330 "source": {
331 "description": "Contents of the cell, represented as an array of lines.",
332 "$ref": "#/definitions/misc/multiline_string"
333 },
334 "prompt_number": {
335 "description": "The code cell's prompt number. Will be null if the cell has not been run.",
336 "type": ["integer", "null"],
337 "minimum": 0
338 },
339 "mimetype": {
340 "patternProperties": {
341 "^[a-zA-Z0-9\\-\\+]+/[a-zA-Z0-9\\-\\+]+": {
342 "description": "The cell's mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
343 "$ref": "#/definitions/misc/multiline_string"
344 }
345 }
346 },
347 "output_metadata": {
348 "description": "Cell output metadata.",
349 "type": "object",
350 "additionalProperties": true
351 },
352 "multiline_string": {
353 "oneOf" : [
354 {"type": "string"},
355 {
356 "type": "array",
357 "items": {"type": "string"}
358 }
359 ]
360 }
361 }
362 }
363 }
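
The schema above describes the complete v3 notebook document in JSON Schema draft 4 terms: a root object with metadata, nbformat, nbformat_minor and worksheets, with every cell and output type spelled out under definitions. As a rough sketch of exercising it directly with the jsonschema library (the on-disk path is an assumption; it mirrors the nbformat_schema value set later in this changeset), a hand-built notebook dict can be checked like this:

    # Sketch only: validate a minimal v3 notebook against the schema above.
    # The schema path is assumed to be IPython/nbformat/v3/nbformat.v3.schema.json.
    import json
    from jsonschema import Draft4Validator

    with open("IPython/nbformat/v3/nbformat.v3.schema.json") as f:
        schema = json.load(f)

    notebook = {
        "metadata": {"name": "example"},
        "nbformat": 3,
        "nbformat_minor": 0,
        "worksheets": [{
            "cells": [
                {"cell_type": "markdown", "source": "# Hello", "metadata": {}},
                {"cell_type": "code", "language": "python", "input": "print('hi')",
                 "outputs": [], "collapsed": False, "metadata": {}},
            ],
            "metadata": {},
        }],
    }

    validator = Draft4Validator(schema)
    for error in validator.iter_errors(notebook):
        print(error.message)        # nothing is printed for a valid notebook
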
@@ -1,212 +1,213 b''
1 """The official API for working with notebooks in the current format version."""
1 """The official API for working with notebooks in the current format version."""
2
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
3 from __future__ import print_function
6 from __future__ import print_function
4
7
5 import re
8 import re
6
9
7 from IPython.utils.py3compat import unicode_type
10 from IPython.utils.py3compat import unicode_type
8
11
9 from IPython.nbformat.v3 import (
12 from IPython.nbformat.v3 import (
10 NotebookNode,
13 NotebookNode,
11 new_code_cell, new_text_cell, new_notebook, new_output, new_worksheet,
14 new_code_cell, new_text_cell, new_notebook, new_output, new_worksheet,
12 parse_filename, new_metadata, new_author, new_heading_cell, nbformat,
15 parse_filename, new_metadata, new_author, new_heading_cell, nbformat,
13 nbformat_minor, nbformat_schema, to_notebook_json
16 nbformat_minor, nbformat_schema, to_notebook_json
14 )
17 )
15 from IPython.nbformat import v3 as _v_latest
18 from IPython.nbformat import v3 as _v_latest
16
19
17 from .reader import reads as reader_reads
20 from .reader import reads as reader_reads
18 from .reader import versions
21 from .reader import versions
19 from .convert import convert
22 from .convert import convert
20 from .validator import validate
23 from .validator import validate, ValidationError
21
24
22 from IPython.utils.log import get_logger
25 from IPython.utils.log import get_logger
23
26
24 __all__ = ['NotebookNode', 'new_code_cell', 'new_text_cell', 'new_notebook',
27 __all__ = ['NotebookNode', 'new_code_cell', 'new_text_cell', 'new_notebook',
25 'new_output', 'new_worksheet', 'parse_filename', 'new_metadata', 'new_author',
28 'new_output', 'new_worksheet', 'parse_filename', 'new_metadata', 'new_author',
26 'new_heading_cell', 'nbformat', 'nbformat_minor', 'nbformat_schema',
29 'new_heading_cell', 'nbformat', 'nbformat_minor', 'nbformat_schema',
27 'to_notebook_json', 'convert', 'validate', 'NBFormatError', 'parse_py',
30 'to_notebook_json', 'convert', 'validate', 'NBFormatError', 'parse_py',
28 'reads_json', 'writes_json', 'reads_py', 'writes_py', 'reads', 'writes', 'read',
31 'reads_json', 'writes_json', 'reads_py', 'writes_py', 'reads', 'writes', 'read',
29 'write']
32 'write']
30
33
31 current_nbformat = nbformat
34 current_nbformat = nbformat
32 current_nbformat_minor = nbformat_minor
35 current_nbformat_minor = nbformat_minor
33 current_nbformat_module = _v_latest.__name__
36 current_nbformat_module = _v_latest.__name__
34
37
35
38
36 class NBFormatError(ValueError):
39 class NBFormatError(ValueError):
37 pass
40 pass
38
41
39
42
40 def parse_py(s, **kwargs):
43 def parse_py(s, **kwargs):
41 """Parse a string into a (nbformat, string) tuple."""
44 """Parse a string into a (nbformat, string) tuple."""
42 nbf = current_nbformat
45 nbf = current_nbformat
43 nbm = current_nbformat_minor
46 nbm = current_nbformat_minor
44
47
45 pattern = r'# <nbformat>(?P<nbformat>\d+[\.\d+]*)</nbformat>'
48 pattern = r'# <nbformat>(?P<nbformat>\d+[\.\d+]*)</nbformat>'
46 m = re.search(pattern,s)
49 m = re.search(pattern,s)
47 if m is not None:
50 if m is not None:
48 digits = m.group('nbformat').split('.')
51 digits = m.group('nbformat').split('.')
49 nbf = int(digits[0])
52 nbf = int(digits[0])
50 if len(digits) > 1:
53 if len(digits) > 1:
51 nbm = int(digits[1])
54 nbm = int(digits[1])
52
55
53 return nbf, nbm, s
56 return nbf, nbm, s
54
57
55
58
56 def reads_json(nbjson, **kwargs):
59 def reads_json(nbjson, **kwargs):
57 """Read a JSON notebook from a string and return the NotebookNode
60 """Read a JSON notebook from a string and return the NotebookNode
58 object. Report if any JSON format errors are detected.
61 object. Report if any JSON format errors are detected.
59
62
60 """
63 """
61 nb = reader_reads(nbjson, **kwargs)
64 nb = reader_reads(nbjson, **kwargs)
62 nb_current = convert(nb, current_nbformat)
65 nb_current = convert(nb, current_nbformat)
63 errors = validate(nb_current)
66 try:
64 if errors:
67 validate(nb_current)
65 get_logger().error(
68 except ValidationError as e:
66 "Notebook JSON is invalid (%d errors detected during read)",
69 get_logger().error("Notebook JSON is invalid: %s", e)
67 len(errors))
68 return nb_current
70 return nb_current
69
71
70
72
71 def writes_json(nb, **kwargs):
73 def writes_json(nb, **kwargs):
72 """Take a NotebookNode object and write out a JSON string. Report if
74 """Take a NotebookNode object and write out a JSON string. Report if
73 any JSON format errors are detected.
75 any JSON format errors are detected.
74
76
75 """
77 """
76 errors = validate(nb)
78 try:
77 if errors:
79 validate(nb)
78 get_logger().error(
80 except ValidationError as e:
79 "Notebook JSON is invalid (%d errors detected during write)",
81 get_logger().error("Notebook JSON is invalid: %s", e)
80 len(errors))
81 nbjson = versions[current_nbformat].writes_json(nb, **kwargs)
82 nbjson = versions[current_nbformat].writes_json(nb, **kwargs)
82 return nbjson
83 return nbjson
83
84
84
85
85 def reads_py(s, **kwargs):
86 def reads_py(s, **kwargs):
86 """Read a .py notebook from a string and return the NotebookNode object."""
87 """Read a .py notebook from a string and return the NotebookNode object."""
87 nbf, nbm, s = parse_py(s, **kwargs)
88 nbf, nbm, s = parse_py(s, **kwargs)
88 if nbf in (2, 3):
89 if nbf in (2, 3):
89 nb = versions[nbf].to_notebook_py(s, **kwargs)
90 nb = versions[nbf].to_notebook_py(s, **kwargs)
90 else:
91 else:
91 raise NBFormatError('Unsupported PY nbformat version: %i' % nbf)
92 raise NBFormatError('Unsupported PY nbformat version: %i' % nbf)
92 return nb
93 return nb
93
94
94
95
95 def writes_py(nb, **kwargs):
96 def writes_py(nb, **kwargs):
96 # nbformat 3 is the latest format that supports py
97 # nbformat 3 is the latest format that supports py
97 return versions[3].writes_py(nb, **kwargs)
98 return versions[3].writes_py(nb, **kwargs)
98
99
99
100
100 # High level API
101 # High level API
101
102
102
103
103 def reads(s, format, **kwargs):
104 def reads(s, format, **kwargs):
104 """Read a notebook from a string and return the NotebookNode object.
105 """Read a notebook from a string and return the NotebookNode object.
105
106
106 This function properly handles notebooks of any version. The notebook
107 This function properly handles notebooks of any version. The notebook
107 returned will always be in the current version's format.
108 returned will always be in the current version's format.
108
109
109 Parameters
110 Parameters
110 ----------
111 ----------
111 s : unicode
112 s : unicode
112 The raw unicode string to read the notebook from.
113 The raw unicode string to read the notebook from.
113 format : (u'json', u'ipynb', u'py')
114 format : (u'json', u'ipynb', u'py')
114 The format that the string is in.
115 The format that the string is in.
115
116
116 Returns
117 Returns
117 -------
118 -------
118 nb : NotebookNode
119 nb : NotebookNode
119 The notebook that was read.
120 The notebook that was read.
120 """
121 """
121 format = unicode_type(format)
122 format = unicode_type(format)
122 if format == u'json' or format == u'ipynb':
123 if format == u'json' or format == u'ipynb':
123 return reads_json(s, **kwargs)
124 return reads_json(s, **kwargs)
124 elif format == u'py':
125 elif format == u'py':
125 return reads_py(s, **kwargs)
126 return reads_py(s, **kwargs)
126 else:
127 else:
127 raise NBFormatError('Unsupported format: %s' % format)
128 raise NBFormatError('Unsupported format: %s' % format)
128
129
129
130
130 def writes(nb, format, **kwargs):
131 def writes(nb, format, **kwargs):
131 """Write a notebook to a string in a given format in the current nbformat version.
132 """Write a notebook to a string in a given format in the current nbformat version.
132
133
133 This function always writes the notebook in the current nbformat version.
134 This function always writes the notebook in the current nbformat version.
134
135
135 Parameters
136 Parameters
136 ----------
137 ----------
137 nb : NotebookNode
138 nb : NotebookNode
138 The notebook to write.
139 The notebook to write.
139 format : (u'json', u'ipynb', u'py')
140 format : (u'json', u'ipynb', u'py')
140 The format to write the notebook in.
141 The format to write the notebook in.
141
142
142 Returns
143 Returns
143 -------
144 -------
144 s : unicode
145 s : unicode
145 The notebook string.
146 The notebook string.
146 """
147 """
147 format = unicode_type(format)
148 format = unicode_type(format)
148 if format == u'json' or format == u'ipynb':
149 if format == u'json' or format == u'ipynb':
149 return writes_json(nb, **kwargs)
150 return writes_json(nb, **kwargs)
150 elif format == u'py':
151 elif format == u'py':
151 return writes_py(nb, **kwargs)
152 return writes_py(nb, **kwargs)
152 else:
153 else:
153 raise NBFormatError('Unsupported format: %s' % format)
154 raise NBFormatError('Unsupported format: %s' % format)
154
155
155
156
156 def read(fp, format, **kwargs):
157 def read(fp, format, **kwargs):
157 """Read a notebook from a file and return the NotebookNode object.
158 """Read a notebook from a file and return the NotebookNode object.
158
159
159 This function properly handles notebooks of any version. The notebook
160 This function properly handles notebooks of any version. The notebook
160 returned will always be in the current version's format.
161 returned will always be in the current version's format.
161
162
162 Parameters
163 Parameters
163 ----------
164 ----------
164 fp : file
165 fp : file
165 Any file-like object with a read method.
166 Any file-like object with a read method.
166 format : (u'json', u'ipynb', u'py')
167 format : (u'json', u'ipynb', u'py')
167 The format that the string is in.
168 The format that the string is in.
168
169
169 Returns
170 Returns
170 -------
171 -------
171 nb : NotebookNode
172 nb : NotebookNode
172 The notebook that was read.
173 The notebook that was read.
173 """
174 """
174 return reads(fp.read(), format, **kwargs)
175 return reads(fp.read(), format, **kwargs)
175
176
176
177
177 def write(nb, fp, format, **kwargs):
178 def write(nb, fp, format, **kwargs):
178 """Write a notebook to a file in a given format in the current nbformat version.
179 """Write a notebook to a file in a given format in the current nbformat version.
179
180
180 This function always writes the notebook in the current nbformat version.
181 This function always writes the notebook in the current nbformat version.
181
182
182 Parameters
183 Parameters
183 ----------
184 ----------
184 nb : NotebookNode
185 nb : NotebookNode
185 The notebook to write.
186 The notebook to write.
186 fp : file
187 fp : file
187 Any file-like object with a write method.
188 Any file-like object with a write method.
188 format : (u'json', u'ipynb', u'py')
189 format : (u'json', u'ipynb', u'py')
189 The format to write the notebook in.
190 The format to write the notebook in.
190
191
191 Returns
192 Returns
192 -------
193 -------
193 s : unicode
194 s : unicode
194 The notebook string.
195 The notebook string.
195 """
196 """
196 return fp.write(writes(nb, format, **kwargs))
197 return fp.write(writes(nb, format, **kwargs))
197
198
198 def _convert_to_metadata():
199 def _convert_to_metadata():
199 """Convert to a notebook having notebook metadata."""
200 """Convert to a notebook having notebook metadata."""
200 import glob
201 import glob
201 for fname in glob.glob('*.ipynb'):
202 for fname in glob.glob('*.ipynb'):
202 print('Converting file:',fname)
203 print('Converting file:',fname)
203 with open(fname,'r') as f:
204 with open(fname,'r') as f:
204 nb = read(f,u'json')
205 nb = read(f,u'json')
205 md = new_metadata()
206 md = new_metadata()
206 if u'name' in nb:
207 if u'name' in nb:
207 md.name = nb.name
208 md.name = nb.name
208 del nb[u'name']
209 del nb[u'name']
209 nb.metadata = md
210 nb.metadata = md
210 with open(fname,'w') as f:
211 with open(fname,'w') as f:
211 write(nb, f, u'json')
212 write(nb, f, u'json')
212
213
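
The change to this module is that validate() now raises jsonschema's ValidationError instead of returning a list of errors, so reads_json and writes_json wrap the call in try/except and log the single exception. A minimal caller-side sketch of the same pattern (the notebook path is a placeholder):

    # Sketch only: read, validate and re-serialize a notebook with the
    # updated exception-based API. "example.ipynb" is a placeholder path.
    from IPython.nbformat.current import reads, writes, validate, ValidationError

    with open("example.ipynb") as f:
        nb = reads(f.read(), "json")      # converted to the current (v3) format

    try:
        validate(nb)
    except ValidationError as e:
        print("notebook is invalid:", e)

    s = writes(nb, "json")                # always written in the current version
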
@@ -1,73 +1,45 b''
1 """
1 """Test nbformat.validator"""
2 Contains tests class for validator.py
3 """
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2014 The IPython Development Team
6 #
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
10
2
11 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
12 # Imports
4 # Distributed under the terms of the Modified BSD License.
13 #-----------------------------------------------------------------------------
14
5
15 import os
6 import os
16
7
17 from .base import TestsBase
8 from .base import TestsBase
18 from jsonschema import SchemaError
9 from jsonschema import ValidationError
19 from ..current import read
10 from ..current import read
20 from ..validator import schema_path, isvalid, validate, resolve_ref
11 from ..validator import isvalid, validate
21
12
22
13
23 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
24 # Classes and functions
15 # Classes and functions
25 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
26
17
27 class TestValidator(TestsBase):
18 class TestValidator(TestsBase):
28
19
29 def test_schema_path(self):
30 """Test that the schema path exists"""
31 self.assertEqual(os.path.exists(schema_path), True)
32
33 def test_nb2(self):
20 def test_nb2(self):
34 """Test that a v2 notebook converted to v3 passes validation"""
21 """Test that a v2 notebook converted to v3 passes validation"""
35 with self.fopen(u'test2.ipynb', u'r') as f:
22 with self.fopen(u'test2.ipynb', u'r') as f:
36 nb = read(f, u'json')
23 nb = read(f, u'json')
37 self.assertEqual(validate(nb), [])
24 validate(nb)
38 self.assertEqual(isvalid(nb), True)
25 self.assertEqual(isvalid(nb), True)
39
26
40 def test_nb3(self):
27 def test_nb3(self):
41 """Test that a v3 notebook passes validation"""
28 """Test that a v3 notebook passes validation"""
42 with self.fopen(u'test3.ipynb', u'r') as f:
29 with self.fopen(u'test3.ipynb', u'r') as f:
43 nb = read(f, u'json')
30 nb = read(f, u'json')
44 self.assertEqual(validate(nb), [])
31 validate(nb)
45 self.assertEqual(isvalid(nb), True)
32 self.assertEqual(isvalid(nb), True)
46
33
47 def test_invalid(self):
34 def test_invalid(self):
48 """Test than an invalid notebook does not pass validation"""
35 """Test than an invalid notebook does not pass validation"""
49 # this notebook has a few different errors:
36 # this notebook has a few different errors:
50 # - the name is an integer, rather than a string
37 # - the name is an integer, rather than a string
51 # - one cell is missing its source
38 # - one cell is missing its source
52 # - one cell has an invalid level
39 # - one cell has an invalid level
53 with self.fopen(u'invalid.ipynb', u'r') as f:
40 with self.fopen(u'invalid.ipynb', u'r') as f:
54 nb = read(f, u'json')
41 nb = read(f, u'json')
55 self.assertEqual(len(validate(nb)), 3)
42 with self.assertRaises(ValidationError):
43 validate(nb)
56 self.assertEqual(isvalid(nb), False)
44 self.assertEqual(isvalid(nb), False)
57
45
58 def test_resolve_ref(self):
59 """Test that references are correctly resolved"""
60 # make sure it resolves the ref correctly
61 json = {"abc": "def", "ghi": {"$ref": "/abc"}}
62 resolved = resolve_ref(json)
63 self.assertEqual(resolved, {"abc": "def", "ghi": "def"})
64
65 # make sure it throws an error if the ref is not by itself
66 json = {"abc": "def", "ghi": {"$ref": "/abc", "foo": "bar"}}
67 with self.assertRaises(SchemaError):
68 resolved = resolve_ref(json)
69
70 # make sure it can handle json with no reference
71 json = {"abc": "def"}
72 resolved = resolve_ref(json)
73 self.assertEqual(resolved, json)
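
The rewritten tests encode the new contract: validate() simply raises ValidationError when the notebook does not conform, and isvalid() wraps it to return a boolean; the old schema_path and resolve_ref tests go away because the hand-rolled reference resolver is gone. A small sketch of how the two entry points relate (module path taken from the relative imports above):

    # Sketch only: isvalid() is a boolean wrapper around validate().
    from jsonschema import ValidationError
    from IPython.nbformat.validator import isvalid, validate

    def check(nb):
        if isvalid(nb):
            return True
        try:
            validate(nb)                  # re-run to surface the actual error
        except ValidationError as e:
            print(e.message)
        return False
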
@@ -1,205 +1,205 b''
1 """The basic dict based notebook format.
1 """The basic dict based notebook format.
2
2
3 The Python representation of a notebook is a nested structure of
3 The Python representation of a notebook is a nested structure of
4 dictionary subclasses that support attribute access
4 dictionary subclasses that support attribute access
5 (IPython.utils.ipstruct.Struct). The functions in this module are merely
5 (IPython.utils.ipstruct.Struct). The functions in this module are merely
6 helpers to build the structs in the right form.
6 helpers to build the structs in the right form.
7 """
7 """
8
8
9 # Copyright (c) IPython Development Team.
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11
11
12 import pprint
12 import pprint
13 import uuid
13 import uuid
14
14
15 from IPython.utils.ipstruct import Struct
15 from IPython.utils.ipstruct import Struct
16 from IPython.utils.py3compat import cast_unicode, unicode_type
16 from IPython.utils.py3compat import cast_unicode, unicode_type
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Code
19 # Code
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 # Change this when incrementing the nbformat version
22 # Change this when incrementing the nbformat version
23 nbformat = 3
23 nbformat = 3
24 nbformat_minor = 0
24 nbformat_minor = 0
25 nbformat_schema = 'v3.withref.json'
25 nbformat_schema = 'nbformat.v3.schema.json'
26
26
27 class NotebookNode(Struct):
27 class NotebookNode(Struct):
28 pass
28 pass
29
29
30
30
31 def from_dict(d):
31 def from_dict(d):
32 if isinstance(d, dict):
32 if isinstance(d, dict):
33 newd = NotebookNode()
33 newd = NotebookNode()
34 for k,v in d.items():
34 for k,v in d.items():
35 newd[k] = from_dict(v)
35 newd[k] = from_dict(v)
36 return newd
36 return newd
37 elif isinstance(d, (tuple, list)):
37 elif isinstance(d, (tuple, list)):
38 return [from_dict(i) for i in d]
38 return [from_dict(i) for i in d]
39 else:
39 else:
40 return d
40 return d
41
41
42
42
43 def new_output(output_type, output_text=None, output_png=None,
43 def new_output(output_type, output_text=None, output_png=None,
44 output_html=None, output_svg=None, output_latex=None, output_json=None,
44 output_html=None, output_svg=None, output_latex=None, output_json=None,
45 output_javascript=None, output_jpeg=None, prompt_number=None,
45 output_javascript=None, output_jpeg=None, prompt_number=None,
46 ename=None, evalue=None, traceback=None, stream=None, metadata=None):
46 ename=None, evalue=None, traceback=None, stream=None, metadata=None):
47 """Create a new output, to go in the ``cell.outputs`` list of a code cell.
47 """Create a new output, to go in the ``cell.outputs`` list of a code cell.
48 """
48 """
49 output = NotebookNode()
49 output = NotebookNode()
50 output.output_type = unicode_type(output_type)
50 output.output_type = unicode_type(output_type)
51
51
52 if metadata is None:
52 if metadata is None:
53 metadata = {}
53 metadata = {}
54 if not isinstance(metadata, dict):
54 if not isinstance(metadata, dict):
55 raise TypeError("metadata must be dict")
55 raise TypeError("metadata must be dict")
56 output.metadata = metadata
56 output.metadata = metadata
57
57
58 if output_type != 'pyerr':
58 if output_type != 'pyerr':
59 if output_text is not None:
59 if output_text is not None:
60 output.text = cast_unicode(output_text)
60 output.text = cast_unicode(output_text)
61 if output_png is not None:
61 if output_png is not None:
62 output.png = cast_unicode(output_png)
62 output.png = cast_unicode(output_png)
63 if output_jpeg is not None:
63 if output_jpeg is not None:
64 output.jpeg = cast_unicode(output_jpeg)
64 output.jpeg = cast_unicode(output_jpeg)
65 if output_html is not None:
65 if output_html is not None:
66 output.html = cast_unicode(output_html)
66 output.html = cast_unicode(output_html)
67 if output_svg is not None:
67 if output_svg is not None:
68 output.svg = cast_unicode(output_svg)
68 output.svg = cast_unicode(output_svg)
69 if output_latex is not None:
69 if output_latex is not None:
70 output.latex = cast_unicode(output_latex)
70 output.latex = cast_unicode(output_latex)
71 if output_json is not None:
71 if output_json is not None:
72 output.json = cast_unicode(output_json)
72 output.json = cast_unicode(output_json)
73 if output_javascript is not None:
73 if output_javascript is not None:
74 output.javascript = cast_unicode(output_javascript)
74 output.javascript = cast_unicode(output_javascript)
75
75
76 if output_type == u'pyout':
76 if output_type == u'pyout':
77 if prompt_number is not None:
77 if prompt_number is not None:
78 output.prompt_number = int(prompt_number)
78 output.prompt_number = int(prompt_number)
79
79
80 if output_type == u'pyerr':
80 if output_type == u'pyerr':
81 if ename is not None:
81 if ename is not None:
82 output.ename = cast_unicode(ename)
82 output.ename = cast_unicode(ename)
83 if evalue is not None:
83 if evalue is not None:
84 output.evalue = cast_unicode(evalue)
84 output.evalue = cast_unicode(evalue)
85 if traceback is not None:
85 if traceback is not None:
86 output.traceback = [cast_unicode(frame) for frame in list(traceback)]
86 output.traceback = [cast_unicode(frame) for frame in list(traceback)]
87
87
88 if output_type == u'stream':
88 if output_type == u'stream':
89 output.stream = 'stdout' if stream is None else cast_unicode(stream)
89 output.stream = 'stdout' if stream is None else cast_unicode(stream)
90
90
91 return output
91 return output
92
92
93
93
94 def new_code_cell(input=None, prompt_number=None, outputs=None,
94 def new_code_cell(input=None, prompt_number=None, outputs=None,
95 language=u'python', collapsed=False, metadata=None):
95 language=u'python', collapsed=False, metadata=None):
96 """Create a new code cell with input and output"""
96 """Create a new code cell with input and output"""
97 cell = NotebookNode()
97 cell = NotebookNode()
98 cell.cell_type = u'code'
98 cell.cell_type = u'code'
99 if language is not None:
99 if language is not None:
100 cell.language = cast_unicode(language)
100 cell.language = cast_unicode(language)
101 if input is not None:
101 if input is not None:
102 cell.input = cast_unicode(input)
102 cell.input = cast_unicode(input)
103 if prompt_number is not None:
103 if prompt_number is not None:
104 cell.prompt_number = int(prompt_number)
104 cell.prompt_number = int(prompt_number)
105 if outputs is None:
105 if outputs is None:
106 cell.outputs = []
106 cell.outputs = []
107 else:
107 else:
108 cell.outputs = outputs
108 cell.outputs = outputs
109 if collapsed is not None:
109 if collapsed is not None:
110 cell.collapsed = bool(collapsed)
110 cell.collapsed = bool(collapsed)
111 cell.metadata = NotebookNode(metadata or {})
111 cell.metadata = NotebookNode(metadata or {})
112
112
113 return cell
113 return cell
114
114
115 def new_text_cell(cell_type, source=None, rendered=None, metadata=None):
115 def new_text_cell(cell_type, source=None, rendered=None, metadata=None):
116 """Create a new text cell."""
116 """Create a new text cell."""
117 cell = NotebookNode()
117 cell = NotebookNode()
118 # VERSIONHACK: plaintext -> raw
118 # VERSIONHACK: plaintext -> raw
119 # handle never-released plaintext name for raw cells
119 # handle never-released plaintext name for raw cells
120 if cell_type == 'plaintext':
120 if cell_type == 'plaintext':
121 cell_type = 'raw'
121 cell_type = 'raw'
122 if source is not None:
122 if source is not None:
123 cell.source = cast_unicode(source)
123 cell.source = cast_unicode(source)
124 if rendered is not None:
124 if rendered is not None:
125 cell.rendered = cast_unicode(rendered)
125 cell.rendered = cast_unicode(rendered)
126 cell.metadata = NotebookNode(metadata or {})
126 cell.metadata = NotebookNode(metadata or {})
127 cell.cell_type = cell_type
127 cell.cell_type = cell_type
128 return cell
128 return cell
129
129
130
130
131 def new_heading_cell(source=None, rendered=None, level=1, metadata=None):
131 def new_heading_cell(source=None, rendered=None, level=1, metadata=None):
132 """Create a new section cell with a given integer level."""
132 """Create a new section cell with a given integer level."""
133 cell = NotebookNode()
133 cell = NotebookNode()
134 cell.cell_type = u'heading'
134 cell.cell_type = u'heading'
135 if source is not None:
135 if source is not None:
136 cell.source = cast_unicode(source)
136 cell.source = cast_unicode(source)
137 if rendered is not None:
137 if rendered is not None:
138 cell.rendered = cast_unicode(rendered)
138 cell.rendered = cast_unicode(rendered)
139 cell.level = int(level)
139 cell.level = int(level)
140 cell.metadata = NotebookNode(metadata or {})
140 cell.metadata = NotebookNode(metadata or {})
141 return cell
141 return cell
142
142
143
143
144 def new_worksheet(name=None, cells=None, metadata=None):
144 def new_worksheet(name=None, cells=None, metadata=None):
145 """Create a worksheet by name with with a list of cells."""
145 """Create a worksheet by name with with a list of cells."""
146 ws = NotebookNode()
146 ws = NotebookNode()
147 if name is not None:
147 if name is not None:
148 ws.name = cast_unicode(name)
148 ws.name = cast_unicode(name)
149 if cells is None:
149 if cells is None:
150 ws.cells = []
150 ws.cells = []
151 else:
151 else:
152 ws.cells = list(cells)
152 ws.cells = list(cells)
153 ws.metadata = NotebookNode(metadata or {})
153 ws.metadata = NotebookNode(metadata or {})
154 return ws
154 return ws
155
155
156
156
157 def new_notebook(name=None, metadata=None, worksheets=None):
157 def new_notebook(name=None, metadata=None, worksheets=None):
158 """Create a notebook by name, id and a list of worksheets."""
158 """Create a notebook by name, id and a list of worksheets."""
159 nb = NotebookNode()
159 nb = NotebookNode()
160 nb.nbformat = nbformat
160 nb.nbformat = nbformat
161 nb.nbformat_minor = nbformat_minor
161 nb.nbformat_minor = nbformat_minor
162 if worksheets is None:
162 if worksheets is None:
163 nb.worksheets = []
163 nb.worksheets = []
164 else:
164 else:
165 nb.worksheets = list(worksheets)
165 nb.worksheets = list(worksheets)
166 if metadata is None:
166 if metadata is None:
167 nb.metadata = new_metadata()
167 nb.metadata = new_metadata()
168 else:
168 else:
169 nb.metadata = NotebookNode(metadata)
169 nb.metadata = NotebookNode(metadata)
170 if name is not None:
170 if name is not None:
171 nb.metadata.name = cast_unicode(name)
171 nb.metadata.name = cast_unicode(name)
172 return nb
172 return nb
173
173
174
174
175 def new_metadata(name=None, authors=None, license=None, created=None,
175 def new_metadata(name=None, authors=None, license=None, created=None,
176 modified=None, gistid=None):
176 modified=None, gistid=None):
177 """Create a new metadata node."""
177 """Create a new metadata node."""
178 metadata = NotebookNode()
178 metadata = NotebookNode()
179 if name is not None:
179 if name is not None:
180 metadata.name = cast_unicode(name)
180 metadata.name = cast_unicode(name)
181 if authors is not None:
181 if authors is not None:
182 metadata.authors = list(authors)
182 metadata.authors = list(authors)
183 if created is not None:
183 if created is not None:
184 metadata.created = cast_unicode(created)
184 metadata.created = cast_unicode(created)
185 if modified is not None:
185 if modified is not None:
186 metadata.modified = cast_unicode(modified)
186 metadata.modified = cast_unicode(modified)
187 if license is not None:
187 if license is not None:
188 metadata.license = cast_unicode(license)
188 metadata.license = cast_unicode(license)
189 if gistid is not None:
189 if gistid is not None:
190 metadata.gistid = cast_unicode(gistid)
190 metadata.gistid = cast_unicode(gistid)
191 return metadata
191 return metadata
192
192
193 def new_author(name=None, email=None, affiliation=None, url=None):
193 def new_author(name=None, email=None, affiliation=None, url=None):
194 """Create a new author."""
194 """Create a new author."""
195 author = NotebookNode()
195 author = NotebookNode()
196 if name is not None:
196 if name is not None:
197 author.name = cast_unicode(name)
197 author.name = cast_unicode(name)
198 if email is not None:
198 if email is not None:
199 author.email = cast_unicode(email)
199 author.email = cast_unicode(email)
200 if affiliation is not None:
200 if affiliation is not None:
201 author.affiliation = cast_unicode(affiliation)
201 author.affiliation = cast_unicode(affiliation)
202 if url is not None:
202 if url is not None:
203 author.url = cast_unicode(url)
203 author.url = cast_unicode(url)
204 return author
204 return author
205
205
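
The helpers in this module construct exactly the structures the v3 schema describes, so a notebook built with them should pass the new validator. A short sketch of assembling a small notebook (module path assumed from the repository layout, values illustrative):

    # Sketch only: build a tiny v3 notebook with the helpers defined above.
    from IPython.nbformat.v3.nbbase import (
        new_author, new_code_cell, new_heading_cell, new_metadata,
        new_notebook, new_output, new_worksheet,
    )

    author = new_author(name="Jane Doe", email="jane@example.com")
    md = new_metadata(name="demo", authors=[author])

    out = new_output("pyout", output_text="4", prompt_number=1)
    cells = [
        new_heading_cell(source="Example", level=1),
        new_code_cell(input="2 + 2", prompt_number=1, outputs=[out]),
    ]
    nb = new_notebook(metadata=md, worksheets=[new_worksheet(cells=cells)])
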
@@ -1,112 +1,72 b''
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
3
1 from __future__ import print_function
4 from __future__ import print_function
2 import json
5 import json
3 import os
6 import os
4
7
5 try:
8 try:
6 from jsonschema import SchemaError
9 from jsonschema import ValidationError
7 from jsonschema import Draft3Validator as Validator
10 from jsonschema import Draft4Validator as Validator
8 except ImportError as e:
11 except ImportError as e:
9 verbose_msg = """
12 verbose_msg = """
10
13
11 IPython depends on the jsonschema package: https://pypi.python.org/pypi/jsonschema
14 IPython notebook format depends on the jsonschema package:
12
15
13 Please install it first.
16 https://pypi.python.org/pypi/jsonschema
14 """
15 raise ImportError(str(e) + verbose_msg)
16
17 try:
18 import jsonpointer as jsonpointer
19 except ImportError as e:
20 verbose_msg = """
21
22 IPython depends on the jsonpointer package: https://pypi.python.org/pypi/jsonpointer
23
17
24 Please install it first.
18 Please install it first.
25 """
19 """
26 raise ImportError(str(e) + verbose_msg)
20 raise ImportError(str(e) + verbose_msg)
27
21
28 from IPython.utils.py3compat import iteritems
22 from IPython.utils.importstring import import_item
23
29
24
25 validators = {}
30
26
31 from .current import nbformat, nbformat_schema
27 def get_validator(version=None):
32 schema_path = os.path.join(
28 """Load the JSON schema into a Validator"""
33 os.path.dirname(__file__), "v%d" % nbformat, nbformat_schema)
29 if version is None:
30 from .current import nbformat as version
34
31
32 if version not in validators:
33 v = import_item("IPython.nbformat.v%s" % version)
34 schema_path = os.path.join(os.path.dirname(v.__file__), v.nbformat_schema)
35 with open(schema_path) as f:
36 schema_json = json.load(f)
37 validators[version] = Validator(schema_json)
38 return validators[version]
35
39
36 def isvalid(nbjson):
40 def isvalid(nbjson, ref=None, version=None):
37 """Checks whether the given notebook JSON conforms to the current
41 """Checks whether the given notebook JSON conforms to the current
38 notebook format schema. Returns True if the JSON is valid, and
42 notebook format schema. Returns True if the JSON is valid, and
39 False otherwise.
43 False otherwise.
40
44
41 To see the individual errors that were encountered, please use the
45 To see the individual errors that were encountered, please use the
42 `validate` function instead.
46 `validate` function instead.
43
44 """
47 """
45
48 try:
46 errors = validate(nbjson)
49 validate(nbjson, ref, version)
47 return errors == []
50 except ValidationError:
51 return False
52 else:
53 return True
48
54
49
55
50 def validate(nbjson):
56 def validate(nbjson, ref=None, version=None):
51 """Checks whether the given notebook JSON conforms to the current
57 """Checks whether the given notebook JSON conforms to the current
52 notebook format schema, and returns the list of errors.
58 notebook format schema.
53
54 """
55
56 # load the schema file
57 with open(schema_path, 'r') as fh:
58 schema_json = json.load(fh)
59
60 # resolve internal references
61 schema = resolve_ref(schema_json)
62 schema = jsonpointer.resolve_pointer(schema, '/notebook')
63
64 # count how many errors there are
65 v = Validator(schema)
66 errors = list(v.iter_errors(nbjson))
67 return errors
68
69
70 def resolve_ref(json, schema=None):
71 """Resolve internal references within the given JSON. This essentially
72 means that dictionaries of this form:
73
74 {"$ref": "/somepointer"}
75
76 will be replaced with the resolved reference to `/somepointer`.
77 This only supports local reference to the same JSON file.
78
59
60 Raises ValidationError if not valid.
79 """
61 """
62 if version is None:
63 from .current import nbformat
64 version = nbjson.get('nbformat', nbformat)
80
65
81 if not schema:
66 validator = get_validator(version)
82 schema = json
83
84 # if it's a list, resolve references for each item in the list
85 if type(json) is list:
86 resolved = []
87 for item in json:
88 resolved.append(resolve_ref(item, schema=schema))
89
90 # if it's a dictionary, resolve references for each item in the
91 # dictionary
92 elif type(json) is dict:
93 resolved = {}
94 for key, ref in iteritems(json):
95
96 # if the key is equal to $ref, then replace the entire
97 # dictionary with the resolved value
98 if key == '$ref':
99 if len(json) != 1:
100 raise SchemaError(
101 "objects containing a $ref should only have one item")
102 pointer = jsonpointer.resolve_pointer(schema, ref)
103 resolved = resolve_ref(pointer, schema=schema)
104
105 else:
106 resolved[key] = resolve_ref(ref, schema=schema)
107
67
108 # otherwise it's a normal object, so just return it
68 if ref:
69 return validator.validate(nbjson, {'$ref' : '#/definitions/%s' % ref})
109 else:
70 else:
110 resolved = json
71 return validator.validate(nbjson)
111
72
112 return resolved
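
The rewritten validator drops the jsonpointer-based resolve_ref machinery, loads the per-version schema file once, and caches a Draft4Validator per nbformat major version; the optional ref argument lets a fragment be validated against a single definition in the schema instead of the whole notebook. A hedged sketch of that usage (module path and values illustrative):

    # Sketch only: validate a single cell against one schema definition.
    from jsonschema import ValidationError
    from IPython.nbformat.validator import get_validator, validate

    cell = {"cell_type": "markdown", "source": "*hello*", "metadata": {}}

    try:
        validate(cell, ref="markdown_cell", version=3)
    except ValidationError as e:
        print("invalid cell:", e.message)

    v3 = get_validator(3)   # cached jsonschema.Draft4Validator for the v3 schema
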
@@ -1,680 +1,680 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """IPython Test Process Controller
2 """IPython Test Process Controller
3
3
4 This module runs one or more subprocesses which will actually run the IPython
4 This module runs one or more subprocesses which will actually run the IPython
5 test suite.
5 test suite.
6
6
7 """
7 """
8
8
9 # Copyright (c) IPython Development Team.
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11
11
12 from __future__ import print_function
12 from __future__ import print_function
13
13
14 import argparse
14 import argparse
15 import json
15 import json
16 import multiprocessing.pool
16 import multiprocessing.pool
17 import os
17 import os
18 import shutil
18 import shutil
19 import signal
19 import signal
20 import sys
20 import sys
21 import subprocess
21 import subprocess
22 import time
22 import time
23 import re
23 import re
24
24
25 from .iptest import have, test_group_names as py_test_group_names, test_sections, StreamCapturer
25 from .iptest import have, test_group_names as py_test_group_names, test_sections, StreamCapturer
26 from IPython.utils.path import compress_user
26 from IPython.utils.path import compress_user
27 from IPython.utils.py3compat import bytes_to_str
27 from IPython.utils.py3compat import bytes_to_str
28 from IPython.utils.sysinfo import get_sys_info
28 from IPython.utils.sysinfo import get_sys_info
29 from IPython.utils.tempdir import TemporaryDirectory
29 from IPython.utils.tempdir import TemporaryDirectory
30 from IPython.utils.text import strip_ansi
30 from IPython.utils.text import strip_ansi
31
31
32 try:
32 try:
33 # Python >= 3.3
33 # Python >= 3.3
34 from subprocess import TimeoutExpired
34 from subprocess import TimeoutExpired
35 def popen_wait(p, timeout):
35 def popen_wait(p, timeout):
36 return p.wait(timeout)
36 return p.wait(timeout)
37 except ImportError:
37 except ImportError:
38 class TimeoutExpired(Exception):
38 class TimeoutExpired(Exception):
39 pass
39 pass
40 def popen_wait(p, timeout):
40 def popen_wait(p, timeout):
41 """backport of Popen.wait from Python 3"""
41 """backport of Popen.wait from Python 3"""
42 for i in range(int(10 * timeout)):
42 for i in range(int(10 * timeout)):
43 if p.poll() is not None:
43 if p.poll() is not None:
44 return
44 return
45 time.sleep(0.1)
45 time.sleep(0.1)
46 if p.poll() is None:
46 if p.poll() is None:
47 raise TimeoutExpired
47 raise TimeoutExpired
48
48
49 NOTEBOOK_SHUTDOWN_TIMEOUT = 10
49 NOTEBOOK_SHUTDOWN_TIMEOUT = 10
50
50
51 class TestController(object):
51 class TestController(object):
52 """Run tests in a subprocess
52 """Run tests in a subprocess
53 """
53 """
54 #: str, IPython test suite to be executed.
54 #: str, IPython test suite to be executed.
55 section = None
55 section = None
56 #: list, command line arguments to be executed
56 #: list, command line arguments to be executed
57 cmd = None
57 cmd = None
58 #: dict, extra environment variables to set for the subprocess
58 #: dict, extra environment variables to set for the subprocess
59 env = None
59 env = None
60 #: list, TemporaryDirectory instances to clear up when the process finishes
60 #: list, TemporaryDirectory instances to clear up when the process finishes
61 dirs = None
61 dirs = None
62 #: subprocess.Popen instance
62 #: subprocess.Popen instance
63 process = None
63 process = None
64 #: str, process stdout+stderr
64 #: str, process stdout+stderr
65 stdout = None
65 stdout = None
66
66
67 def __init__(self):
67 def __init__(self):
68 self.cmd = []
68 self.cmd = []
69 self.env = {}
69 self.env = {}
70 self.dirs = []
70 self.dirs = []
71
71
72 def setup(self):
72 def setup(self):
73 """Create temporary directories etc.
73 """Create temporary directories etc.
74
74
75 This is only called when we know the test group will be run. Things
75 This is only called when we know the test group will be run. Things
76 created here may be cleaned up by self.cleanup().
76 created here may be cleaned up by self.cleanup().
77 """
77 """
78 pass
78 pass
79
79
80 def launch(self, buffer_output=False):
80 def launch(self, buffer_output=False):
81 # print('*** ENV:', self.env) # dbg
81 # print('*** ENV:', self.env) # dbg
82 # print('*** CMD:', self.cmd) # dbg
82 # print('*** CMD:', self.cmd) # dbg
83 env = os.environ.copy()
83 env = os.environ.copy()
84 env.update(self.env)
84 env.update(self.env)
85 output = subprocess.PIPE if buffer_output else None
85 output = subprocess.PIPE if buffer_output else None
86 stdout = subprocess.STDOUT if buffer_output else None
86 stdout = subprocess.STDOUT if buffer_output else None
87 self.process = subprocess.Popen(self.cmd, stdout=output,
87 self.process = subprocess.Popen(self.cmd, stdout=output,
88 stderr=stdout, env=env)
88 stderr=stdout, env=env)
89
89
90 def wait(self):
90 def wait(self):
91 self.stdout, _ = self.process.communicate()
91 self.stdout, _ = self.process.communicate()
92 return self.process.returncode
92 return self.process.returncode
93
93
94 def print_extra_info(self):
94 def print_extra_info(self):
95 """Print extra information about this test run.
95 """Print extra information about this test run.
96
96
97 If we're running in parallel and showing the concise view, this is only
97 If we're running in parallel and showing the concise view, this is only
98 called if the test group fails. Otherwise, it's called before the test
98 called if the test group fails. Otherwise, it's called before the test
99 group is started.
99 group is started.
100
100
101 The base implementation does nothing, but it can be overridden by
101 The base implementation does nothing, but it can be overridden by
102 subclasses.
102 subclasses.
103 """
103 """
104 return
104 return
105
105
106 def cleanup_process(self):
106 def cleanup_process(self):
107 """Cleanup on exit by killing any leftover processes."""
107 """Cleanup on exit by killing any leftover processes."""
108 subp = self.process
108 subp = self.process
109 if subp is None or (subp.poll() is not None):
109 if subp is None or (subp.poll() is not None):
110 return # Process doesn't exist, or is already dead.
110 return # Process doesn't exist, or is already dead.
111
111
112 try:
112 try:
113 print('Cleaning up stale PID: %d' % subp.pid)
113 print('Cleaning up stale PID: %d' % subp.pid)
114 subp.kill()
114 subp.kill()
115 except: # (OSError, WindowsError) ?
115 except: # (OSError, WindowsError) ?
116 # This is just a best effort, if we fail or the process was
116 # This is just a best effort, if we fail or the process was
117 # really gone, ignore it.
117 # really gone, ignore it.
118 pass
118 pass
119 else:
119 else:
120 for i in range(10):
120 for i in range(10):
121 if subp.poll() is None:
121 if subp.poll() is None:
122 time.sleep(0.1)
122 time.sleep(0.1)
123 else:
123 else:
124 break
124 break
125
125
126 if subp.poll() is None:
126 if subp.poll() is None:
127 # The process did not die...
127 # The process did not die...
128 print('... failed. Manual cleanup may be required.')
128 print('... failed. Manual cleanup may be required.')
129
129
130 def cleanup(self):
130 def cleanup(self):
131 "Kill process if it's still alive, and clean up temporary directories"
131 "Kill process if it's still alive, and clean up temporary directories"
132 self.cleanup_process()
132 self.cleanup_process()
133 for td in self.dirs:
133 for td in self.dirs:
134 td.cleanup()
134 td.cleanup()
135
135
136 __del__ = cleanup
136 __del__ = cleanup
137
137
138
138
139 class PyTestController(TestController):
139 class PyTestController(TestController):
140 """Run Python tests using IPython.testing.iptest"""
140 """Run Python tests using IPython.testing.iptest"""
141 #: str, Python command to execute in subprocess
141 #: str, Python command to execute in subprocess
142 pycmd = None
142 pycmd = None
143
143
144 def __init__(self, section, options):
144 def __init__(self, section, options):
145 """Create new test runner."""
145 """Create new test runner."""
146 TestController.__init__(self)
146 TestController.__init__(self)
147 self.section = section
147 self.section = section
148 # pycmd is put into cmd[2] in PyTestController.launch()
148 # pycmd is put into cmd[2] in PyTestController.launch()
149 self.cmd = [sys.executable, '-c', None, section]
149 self.cmd = [sys.executable, '-c', None, section]
150 self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
150 self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
151 self.options = options
151 self.options = options
152
152
153 def setup(self):
153 def setup(self):
154 ipydir = TemporaryDirectory()
154 ipydir = TemporaryDirectory()
155 self.dirs.append(ipydir)
155 self.dirs.append(ipydir)
156 self.env['IPYTHONDIR'] = ipydir.name
156 self.env['IPYTHONDIR'] = ipydir.name
157 self.workingdir = workingdir = TemporaryDirectory()
157 self.workingdir = workingdir = TemporaryDirectory()
158 self.dirs.append(workingdir)
158 self.dirs.append(workingdir)
159 self.env['IPTEST_WORKING_DIR'] = workingdir.name
159 self.env['IPTEST_WORKING_DIR'] = workingdir.name
160 # This means we won't get odd effects from our own matplotlib config
160 # This means we won't get odd effects from our own matplotlib config
161 self.env['MPLCONFIGDIR'] = workingdir.name
161 self.env['MPLCONFIGDIR'] = workingdir.name
162
162
163 # From options:
163 # From options:
164 if self.options.xunit:
164 if self.options.xunit:
165 self.add_xunit()
165 self.add_xunit()
166 if self.options.coverage:
166 if self.options.coverage:
167 self.add_coverage()
167 self.add_coverage()
168 self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
168 self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
169 self.cmd.extend(self.options.extra_args)
169 self.cmd.extend(self.options.extra_args)
170
170
171 @property
171 @property
172 def will_run(self):
172 def will_run(self):
173 try:
173 try:
174 return test_sections[self.section].will_run
174 return test_sections[self.section].will_run
175 except KeyError:
175 except KeyError:
176 return True
176 return True
177
177
178 def add_xunit(self):
178 def add_xunit(self):
179 xunit_file = os.path.abspath(self.section + '.xunit.xml')
179 xunit_file = os.path.abspath(self.section + '.xunit.xml')
180 self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
180 self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
181
181
182 def add_coverage(self):
182 def add_coverage(self):
183 try:
183 try:
184 sources = test_sections[self.section].includes
184 sources = test_sections[self.section].includes
185 except KeyError:
185 except KeyError:
186 sources = ['IPython']
186 sources = ['IPython']
187
187
188 coverage_rc = ("[run]\n"
188 coverage_rc = ("[run]\n"
189 "data_file = {data_file}\n"
189 "data_file = {data_file}\n"
190 "source =\n"
190 "source =\n"
191 " {source}\n"
191 " {source}\n"
192 ).format(data_file=os.path.abspath('.coverage.'+self.section),
192 ).format(data_file=os.path.abspath('.coverage.'+self.section),
193 source="\n ".join(sources))
193 source="\n ".join(sources))
194 config_file = os.path.join(self.workingdir.name, '.coveragerc')
194 config_file = os.path.join(self.workingdir.name, '.coveragerc')
195 with open(config_file, 'w') as f:
195 with open(config_file, 'w') as f:
196 f.write(coverage_rc)
196 f.write(coverage_rc)
197
197
198 self.env['COVERAGE_PROCESS_START'] = config_file
198 self.env['COVERAGE_PROCESS_START'] = config_file
199 self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
199 self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
200
200
201 def launch(self, buffer_output=False):
201 def launch(self, buffer_output=False):
202 self.cmd[2] = self.pycmd
202 self.cmd[2] = self.pycmd
203 super(PyTestController, self).launch(buffer_output=buffer_output)
203 super(PyTestController, self).launch(buffer_output=buffer_output)
204
204
205
205
206 js_prefix = 'js/'
206 js_prefix = 'js/'
207
207
208 def get_js_test_dir():
208 def get_js_test_dir():
209 import IPython.html.tests as t
209 import IPython.html.tests as t
210 return os.path.join(os.path.dirname(t.__file__), '')
210 return os.path.join(os.path.dirname(t.__file__), '')
211
211
212 def all_js_groups():
212 def all_js_groups():
213 import glob
213 import glob
214 test_dir = get_js_test_dir()
214 test_dir = get_js_test_dir()
215 all_subdirs = glob.glob(test_dir + '[!_]*/')
215 all_subdirs = glob.glob(test_dir + '[!_]*/')
216 return [js_prefix+os.path.relpath(x, test_dir) for x in all_subdirs]
216 return [js_prefix+os.path.relpath(x, test_dir) for x in all_subdirs]
217
217
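For orientation, all_js_groups() maps every non-underscore subdirectory of IPython.html.tests onto a group name carrying the js/ prefix, and JSController strips that prefix again to locate the CasperJS suites. A small sketch with hypothetical directory names:

    # assuming IPython/html/tests/ contains notebook/ and services/ subdirectories (hypothetical)
    print(all_js_groups())   # -> ['js/notebook', 'js/services']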
218 class JSController(TestController):
218 class JSController(TestController):
219 """Run CasperJS tests."""
219 """Run CasperJS tests."""
220 requirements = ['zmq', 'tornado', 'jinja2', 'casperjs', 'sqlite3',
220 requirements = ['zmq', 'tornado', 'jinja2', 'casperjs', 'sqlite3',
221 'jsonschema', 'jsonpointer']
221 'jsonschema']
222 display_slimer_output = False
222 display_slimer_output = False
223
223
224 def __init__(self, section, xunit=True, engine='phantomjs'):
224 def __init__(self, section, xunit=True, engine='phantomjs'):
225 """Create new test runner."""
225 """Create new test runner."""
226 TestController.__init__(self)
226 TestController.__init__(self)
227 self.engine = engine
227 self.engine = engine
228 self.section = section
228 self.section = section
229 self.xunit = xunit
229 self.xunit = xunit
230 self.slimer_failure = re.compile('^FAIL.*', flags=re.MULTILINE)
230 self.slimer_failure = re.compile('^FAIL.*', flags=re.MULTILINE)
231 js_test_dir = get_js_test_dir()
231 js_test_dir = get_js_test_dir()
232 includes = '--includes=' + os.path.join(js_test_dir,'util.js')
232 includes = '--includes=' + os.path.join(js_test_dir,'util.js')
233 test_cases = os.path.join(js_test_dir, self.section[len(js_prefix):])
233 test_cases = os.path.join(js_test_dir, self.section[len(js_prefix):])
234 self.cmd = ['casperjs', 'test', includes, test_cases, '--engine=%s' % self.engine]
234 self.cmd = ['casperjs', 'test', includes, test_cases, '--engine=%s' % self.engine]
235
235
236 def setup(self):
236 def setup(self):
237 self.ipydir = TemporaryDirectory()
237 self.ipydir = TemporaryDirectory()
238 self.nbdir = TemporaryDirectory()
238 self.nbdir = TemporaryDirectory()
239 self.dirs.append(self.ipydir)
239 self.dirs.append(self.ipydir)
240 self.dirs.append(self.nbdir)
240 self.dirs.append(self.nbdir)
241 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a')))
241 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a')))
242 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b')))
242 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b')))
243
243
244 if self.xunit:
244 if self.xunit:
245 self.add_xunit()
245 self.add_xunit()
246
246
247 # start the ipython notebook, so we get the port number
247 # start the ipython notebook, so we get the port number
248 self.server_port = 0
248 self.server_port = 0
249 self._init_server()
249 self._init_server()
250 if self.server_port:
250 if self.server_port:
251 self.cmd.append("--port=%i" % self.server_port)
251 self.cmd.append("--port=%i" % self.server_port)
252 else:
252 else:
253 # don't launch tests if the server didn't start
253 # don't launch tests if the server didn't start
254 self.cmd = [sys.executable, '-c', 'raise SystemExit(1)']
254 self.cmd = [sys.executable, '-c', 'raise SystemExit(1)']
255
255
256 def add_xunit(self):
256 def add_xunit(self):
257 xunit_file = os.path.abspath(self.section.replace('/','.') + '.xunit.xml')
257 xunit_file = os.path.abspath(self.section.replace('/','.') + '.xunit.xml')
258 self.cmd.append('--xunit=%s' % xunit_file)
258 self.cmd.append('--xunit=%s' % xunit_file)
259
259
260 def launch(self, buffer_output):
260 def launch(self, buffer_output):
261 # If the engine is SlimerJS, we need to buffer the output because
261 # If the engine is SlimerJS, we need to buffer the output because
262 # SlimerJS does not support exit codes, so CasperJS always returns 0.
262 # SlimerJS does not support exit codes, so CasperJS always returns 0.
263 if self.engine == 'slimerjs' and not buffer_output:
263 if self.engine == 'slimerjs' and not buffer_output:
264 self.display_slimer_output = True
264 self.display_slimer_output = True
265 return super(JSController, self).launch(buffer_output=True)
265 return super(JSController, self).launch(buffer_output=True)
266
266
267 else:
267 else:
268 return super(JSController, self).launch(buffer_output=buffer_output)
268 return super(JSController, self).launch(buffer_output=buffer_output)
269
269
270 def wait(self, *pargs, **kwargs):
270 def wait(self, *pargs, **kwargs):
271 """Wait for the JSController to finish"""
271 """Wait for the JSController to finish"""
272 ret = super(JSController, self).wait(*pargs, **kwargs)
272 ret = super(JSController, self).wait(*pargs, **kwargs)
273 # If this is a SlimerJS controller, check the captured stdout for
273 # If this is a SlimerJS controller, check the captured stdout for
274 # errors. Otherwise, just return the return code.
274 # errors. Otherwise, just return the return code.
275 if self.engine == 'slimerjs':
275 if self.engine == 'slimerjs':
276 stdout = bytes_to_str(self.stdout)
276 stdout = bytes_to_str(self.stdout)
277 if self.display_slimer_output:
277 if self.display_slimer_output:
278 print(stdout)
278 print(stdout)
279 if ret != 0:
279 if ret != 0:
280 # This could still happen e.g. if it's stopped by SIGINT
280 # This could still happen e.g. if it's stopped by SIGINT
281 return ret
281 return ret
282 return bool(self.slimer_failure.search(strip_ansi(stdout)))
282 return bool(self.slimer_failure.search(strip_ansi(stdout)))
283 else:
283 else:
284 return ret
284 return ret
285
285
286 def print_extra_info(self):
286 def print_extra_info(self):
287 print("Running tests with notebook directory %r" % self.nbdir.name)
287 print("Running tests with notebook directory %r" % self.nbdir.name)
288
288
289 @property
289 @property
290 def will_run(self):
290 def will_run(self):
291 return all(have[a] for a in self.requirements + [self.engine])
291 return all(have[a] for a in self.requirements + [self.engine])
292
292
293 def _init_server(self):
293 def _init_server(self):
294 "Start the notebook server in a separate process"
294 "Start the notebook server in a separate process"
295 self.server_command = command = [sys.executable,
295 self.server_command = command = [sys.executable,
296 '-m', 'IPython.html',
296 '-m', 'IPython.html',
297 '--no-browser',
297 '--no-browser',
298 '--ipython-dir', self.ipydir.name,
298 '--ipython-dir', self.ipydir.name,
299 '--notebook-dir', self.nbdir.name,
299 '--notebook-dir', self.nbdir.name,
300 ]
300 ]
301 # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
301 # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
302 # which run afoul of ipc's maximum path length.
302 # which run afoul of ipc's maximum path length.
303 if sys.platform.startswith('linux'):
303 if sys.platform.startswith('linux'):
304 command.append('--KernelManager.transport=ipc')
304 command.append('--KernelManager.transport=ipc')
305 self.stream_capturer = c = StreamCapturer()
305 self.stream_capturer = c = StreamCapturer()
306 c.start()
306 c.start()
307 self.server = subprocess.Popen(command, stdout=c.writefd, stderr=subprocess.STDOUT, cwd=self.nbdir.name)
307 self.server = subprocess.Popen(command, stdout=c.writefd, stderr=subprocess.STDOUT, cwd=self.nbdir.name)
308 self.server_info_file = os.path.join(self.ipydir.name,
308 self.server_info_file = os.path.join(self.ipydir.name,
309 'profile_default', 'security', 'nbserver-%i.json' % self.server.pid
309 'profile_default', 'security', 'nbserver-%i.json' % self.server.pid
310 )
310 )
311 self._wait_for_server()
311 self._wait_for_server()
312
312
313 def _wait_for_server(self):
313 def _wait_for_server(self):
314 """Wait 30 seconds for the notebook server to start"""
314 """Wait 30 seconds for the notebook server to start"""
315 for i in range(300):
315 for i in range(300):
316 if self.server.poll() is not None:
316 if self.server.poll() is not None:
317 return self._failed_to_start()
317 return self._failed_to_start()
318 if os.path.exists(self.server_info_file):
318 if os.path.exists(self.server_info_file):
319 try:
319 try:
320 self._load_server_info()
320 self._load_server_info()
321 except ValueError:
321 except ValueError:
322 # If the server is halfway through writing the file, we may
322 # If the server is halfway through writing the file, we may
323 # get invalid JSON; it should be ready next iteration.
323 # get invalid JSON; it should be ready next iteration.
324 pass
324 pass
325 else:
325 else:
326 return
326 return
327 time.sleep(0.1)
327 time.sleep(0.1)
328 print("Notebook server-info file never arrived: %s" % self.server_info_file,
328 print("Notebook server-info file never arrived: %s" % self.server_info_file,
329 file=sys.stderr
329 file=sys.stderr
330 )
330 )
331
331
332 def _failed_to_start(self):
332 def _failed_to_start(self):
333 """Notebook server exited prematurely"""
333 """Notebook server exited prematurely"""
334 captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
334 captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
335 print("Notebook failed to start: ", file=sys.stderr)
335 print("Notebook failed to start: ", file=sys.stderr)
336 print(self.server_command)
336 print(self.server_command)
337 print(captured, file=sys.stderr)
337 print(captured, file=sys.stderr)
338
338
339 def _load_server_info(self):
339 def _load_server_info(self):
340 """Notebook server started, load connection info from JSON"""
340 """Notebook server started, load connection info from JSON"""
341 with open(self.server_info_file) as f:
341 with open(self.server_info_file) as f:
342 info = json.load(f)
342 info = json.load(f)
343 self.server_port = info['port']
343 self.server_port = info['port']
344
344
345 def cleanup(self):
345 def cleanup(self):
346 try:
346 try:
347 self.server.terminate()
347 self.server.terminate()
348 except OSError:
348 except OSError:
349 # already dead
349 # already dead
350 pass
350 pass
351 # wait 10s for the server to shut down
351 # wait 10s for the server to shut down
352 try:
352 try:
353 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
353 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
354 except TimeoutExpired:
354 except TimeoutExpired:
355 # server didn't terminate, kill it
355 # server didn't terminate, kill it
356 try:
356 try:
357 print("Failed to terminate notebook server, killing it.",
357 print("Failed to terminate notebook server, killing it.",
358 file=sys.stderr
358 file=sys.stderr
359 )
359 )
360 self.server.kill()
360 self.server.kill()
361 except OSError:
361 except OSError:
362 # already dead
362 # already dead
363 pass
363 pass
364 # wait another 10s
364 # wait another 10s
365 try:
365 try:
366 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
366 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
367 except TimeoutExpired:
367 except TimeoutExpired:
368 print("Notebook server still running (%s)" % self.server_info_file,
368 print("Notebook server still running (%s)" % self.server_info_file,
369 file=sys.stderr
369 file=sys.stderr
370 )
370 )
371
371
372 self.stream_capturer.halt()
372 self.stream_capturer.halt()
373 TestController.cleanup(self)
373 TestController.cleanup(self)
374
374
375
375
376 def prepare_controllers(options):
376 def prepare_controllers(options):
377 """Returns two lists of TestController instances, those to run, and those
377 """Returns two lists of TestController instances, those to run, and those
378 not to run."""
378 not to run."""
379 testgroups = options.testgroups
379 testgroups = options.testgroups
380 if testgroups:
380 if testgroups:
381 if 'js' in testgroups:
381 if 'js' in testgroups:
382 js_testgroups = all_js_groups()
382 js_testgroups = all_js_groups()
383 else:
383 else:
384 js_testgroups = [g for g in testgroups if g.startswith(js_prefix)]
384 js_testgroups = [g for g in testgroups if g.startswith(js_prefix)]
385
385
386 py_testgroups = [g for g in testgroups if g not in ['js'] + js_testgroups]
386 py_testgroups = [g for g in testgroups if g not in ['js'] + js_testgroups]
387 else:
387 else:
388 py_testgroups = py_test_group_names
388 py_testgroups = py_test_group_names
389 if not options.all:
389 if not options.all:
390 js_testgroups = []
390 js_testgroups = []
391 test_sections['parallel'].enabled = False
391 test_sections['parallel'].enabled = False
392 else:
392 else:
393 js_testgroups = all_js_groups()
393 js_testgroups = all_js_groups()
394
394
395 engine = 'slimerjs' if options.slimerjs else 'phantomjs'
395 engine = 'slimerjs' if options.slimerjs else 'phantomjs'
396 c_js = [JSController(name, xunit=options.xunit, engine=engine) for name in js_testgroups]
396 c_js = [JSController(name, xunit=options.xunit, engine=engine) for name in js_testgroups]
397 c_py = [PyTestController(name, options) for name in py_testgroups]
397 c_py = [PyTestController(name, options) for name in py_testgroups]
398
398
399 controllers = c_py + c_js
399 controllers = c_py + c_js
400 to_run = [c for c in controllers if c.will_run]
400 to_run = [c for c in controllers if c.will_run]
401 not_run = [c for c in controllers if not c.will_run]
401 not_run = [c for c in controllers if not c.will_run]
402 return to_run, not_run
402 return to_run, not_run
403
403
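A worked example of the group resolution above, with assumed inputs: for testgroups == ['js/notebook', 'core'] (both names hypothetical), js_testgroups becomes ['js/notebook'] and py_testgroups becomes ['core'], so one PyTestController and one JSController are built; with no testgroups and without --all, only the Python groups are prepared and the 'parallel' section is disabled.

    opts = default_options()                    # defined further down in this file
    opts.testgroups = ['js/notebook', 'core']   # hypothetical group names
    to_run, not_run = prepare_controllers(opts)
    print([c.section for c in to_run])          # e.g. ['core', 'js/notebook'] when all requirements are available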
404 def do_run(controller, buffer_output=True):
404 def do_run(controller, buffer_output=True):
405 """Setup and run a test controller.
405 """Setup and run a test controller.
406
406
407 If buffer_output is True, no output is displayed, to avoid it appearing
407 If buffer_output is True, no output is displayed, to avoid it appearing
408 interleaved. In this case, the caller is responsible for displaying test
408 interleaved. In this case, the caller is responsible for displaying test
409 output on failure.
409 output on failure.
410
410
411 Returns
411 Returns
412 -------
412 -------
413 controller : TestController
413 controller : TestController
414 The same controller as passed in, as a convenience for using map() type
414 The same controller as passed in, as a convenience for using map() type
415 APIs.
415 APIs.
416 exitcode : int
416 exitcode : int
417 The exit code of the test subprocess. Non-zero indicates failure.
417 The exit code of the test subprocess. Non-zero indicates failure.
418 """
418 """
419 try:
419 try:
420 try:
420 try:
421 controller.setup()
421 controller.setup()
422 if not buffer_output:
422 if not buffer_output:
423 controller.print_extra_info()
423 controller.print_extra_info()
424 controller.launch(buffer_output=buffer_output)
424 controller.launch(buffer_output=buffer_output)
425 except Exception:
425 except Exception:
426 import traceback
426 import traceback
427 traceback.print_exc()
427 traceback.print_exc()
428 return controller, 1 # signal failure
428 return controller, 1 # signal failure
429
429
430 exitcode = controller.wait()
430 exitcode = controller.wait()
431 return controller, exitcode
431 return controller, exitcode
432
432
433 except KeyboardInterrupt:
433 except KeyboardInterrupt:
434 return controller, -signal.SIGINT
434 return controller, -signal.SIGINT
435 finally:
435 finally:
436 controller.cleanup()
436 controller.cleanup()
437
437
438 def report():
438 def report():
439 """Return a string with a summary report of test-related variables."""
439 """Return a string with a summary report of test-related variables."""
440 inf = get_sys_info()
440 inf = get_sys_info()
441 out = []
441 out = []
442 def _add(name, value):
442 def _add(name, value):
443 out.append((name, value))
443 out.append((name, value))
444
444
445 _add('IPython version', inf['ipython_version'])
445 _add('IPython version', inf['ipython_version'])
446 _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
446 _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
447 _add('IPython package', compress_user(inf['ipython_path']))
447 _add('IPython package', compress_user(inf['ipython_path']))
448 _add('Python version', inf['sys_version'].replace('\n',''))
448 _add('Python version', inf['sys_version'].replace('\n',''))
449 _add('sys.executable', compress_user(inf['sys_executable']))
449 _add('sys.executable', compress_user(inf['sys_executable']))
450 _add('Platform', inf['platform'])
450 _add('Platform', inf['platform'])
451
451
452 width = max(len(n) for (n,v) in out)
452 width = max(len(n) for (n,v) in out)
453 out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
453 out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
454
454
455 avail = []
455 avail = []
456 not_avail = []
456 not_avail = []
457
457
458 for k, is_avail in have.items():
458 for k, is_avail in have.items():
459 if is_avail:
459 if is_avail:
460 avail.append(k)
460 avail.append(k)
461 else:
461 else:
462 not_avail.append(k)
462 not_avail.append(k)
463
463
464 if avail:
464 if avail:
465 out.append('\nTools and libraries available at test time:\n')
465 out.append('\nTools and libraries available at test time:\n')
466 avail.sort()
466 avail.sort()
467 out.append(' ' + ' '.join(avail)+'\n')
467 out.append(' ' + ' '.join(avail)+'\n')
468
468
469 if not_avail:
469 if not_avail:
470 out.append('\nTools and libraries NOT available at test time:\n')
470 out.append('\nTools and libraries NOT available at test time:\n')
471 not_avail.sort()
471 not_avail.sort()
472 out.append(' ' + ' '.join(not_avail)+'\n')
472 out.append(' ' + ' '.join(not_avail)+'\n')
473
473
474 return ''.join(out)
474 return ''.join(out)
475
475
476 def run_iptestall(options):
476 def run_iptestall(options):
477 """Run the entire IPython test suite by calling nose and trial.
477 """Run the entire IPython test suite by calling nose and trial.
478
478
479 This function constructs :class:`IPTester` instances for all IPython
479 This function constructs :class:`IPTester` instances for all IPython
480 modules and packages and then runs each of them. This causes the modules
480 modules and packages and then runs each of them. This causes the modules
481 and packages of IPython to be tested each in their own subprocess using
481 and packages of IPython to be tested each in their own subprocess using
482 nose.
482 nose.
483
483
484 Parameters
484 Parameters
485 ----------
485 ----------
486
486
487 All parameters are passed as attributes of the options object.
487 All parameters are passed as attributes of the options object.
488
488
489 testgroups : list of str
489 testgroups : list of str
490 Run only these sections of the test suite. If empty, run all the available
490 Run only these sections of the test suite. If empty, run all the available
491 sections.
491 sections.
492
492
493 fast : int or None
493 fast : int or None
494 Run the test suite in parallel, using n simultaneous processes. If None
494 Run the test suite in parallel, using n simultaneous processes. If None
495 is passed, one process is used per CPU core. Default 1 (i.e. sequential).
495 is passed, one process is used per CPU core. Default 1 (i.e. sequential).
496
496
497 inc_slow : bool
497 inc_slow : bool
498 Include slow tests, like IPython.parallel. By default, these tests aren't
498 Include slow tests, like IPython.parallel. By default, these tests aren't
499 run.
499 run.
500
500
501 slimerjs : bool
501 slimerjs : bool
502 Use slimerjs, if it's installed, instead of phantomjs for casperjs tests.
502 Use slimerjs, if it's installed, instead of phantomjs for casperjs tests.
503
503
504 xunit : bool
504 xunit : bool
505 Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
505 Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
506
506
507 coverage : bool or str
507 coverage : bool or str
508 Measure code coverage from tests. True will store the raw coverage data,
508 Measure code coverage from tests. True will store the raw coverage data,
509 or pass 'html' or 'xml' to get reports.
509 or pass 'html' or 'xml' to get reports.
510
510
511 extra_args : list
511 extra_args : list
512 Extra arguments to pass to the test subprocesses, e.g. '-v'
512 Extra arguments to pass to the test subprocesses, e.g. '-v'
513 """
513 """
514 to_run, not_run = prepare_controllers(options)
514 to_run, not_run = prepare_controllers(options)
515
515
516 def justify(ltext, rtext, width=70, fill='-'):
516 def justify(ltext, rtext, width=70, fill='-'):
517 ltext += ' '
517 ltext += ' '
518 rtext = (' ' + rtext).rjust(width - len(ltext), fill)
518 rtext = (' ' + rtext).rjust(width - len(ltext), fill)
519 return ltext + rtext
519 return ltext + rtext
520
520
521 # Run all test runners, tracking execution time
521 # Run all test runners, tracking execution time
522 failed = []
522 failed = []
523 t_start = time.time()
523 t_start = time.time()
524
524
525 print()
525 print()
526 if options.fast == 1:
526 if options.fast == 1:
527 # This actually means sequential, i.e. with 1 job
527 # This actually means sequential, i.e. with 1 job
528 for controller in to_run:
528 for controller in to_run:
529 print('Test group:', controller.section)
529 print('Test group:', controller.section)
530 sys.stdout.flush() # Show in correct order when output is piped
530 sys.stdout.flush() # Show in correct order when output is piped
531 controller, res = do_run(controller, buffer_output=False)
531 controller, res = do_run(controller, buffer_output=False)
532 if res:
532 if res:
533 failed.append(controller)
533 failed.append(controller)
534 if res == -signal.SIGINT:
534 if res == -signal.SIGINT:
535 print("Interrupted")
535 print("Interrupted")
536 break
536 break
537 print()
537 print()
538
538
539 else:
539 else:
540 # Run tests concurrently
540 # Run tests concurrently
541 try:
541 try:
542 pool = multiprocessing.pool.ThreadPool(options.fast)
542 pool = multiprocessing.pool.ThreadPool(options.fast)
543 for (controller, res) in pool.imap_unordered(do_run, to_run):
543 for (controller, res) in pool.imap_unordered(do_run, to_run):
544 res_string = 'OK' if res == 0 else 'FAILED'
544 res_string = 'OK' if res == 0 else 'FAILED'
545 print(justify('Test group: ' + controller.section, res_string))
545 print(justify('Test group: ' + controller.section, res_string))
546 if res:
546 if res:
547 controller.print_extra_info()
547 controller.print_extra_info()
548 print(bytes_to_str(controller.stdout))
548 print(bytes_to_str(controller.stdout))
549 failed.append(controller)
549 failed.append(controller)
550 if res == -signal.SIGINT:
550 if res == -signal.SIGINT:
551 print("Interrupted")
551 print("Interrupted")
552 break
552 break
553 except KeyboardInterrupt:
553 except KeyboardInterrupt:
554 return
554 return
555
555
556 for controller in not_run:
556 for controller in not_run:
557 print(justify('Test group: ' + controller.section, 'NOT RUN'))
557 print(justify('Test group: ' + controller.section, 'NOT RUN'))
558
558
559 t_end = time.time()
559 t_end = time.time()
560 t_tests = t_end - t_start
560 t_tests = t_end - t_start
561 nrunners = len(to_run)
561 nrunners = len(to_run)
562 nfail = len(failed)
562 nfail = len(failed)
563 # summarize results
563 # summarize results
564 print('_'*70)
564 print('_'*70)
565 print('Test suite completed for system with the following information:')
565 print('Test suite completed for system with the following information:')
566 print(report())
566 print(report())
567 took = "Took %.3fs." % t_tests
567 took = "Took %.3fs." % t_tests
568 print('Status: ', end='')
568 print('Status: ', end='')
569 if not failed:
569 if not failed:
570 print('OK (%d test groups).' % nrunners, took)
570 print('OK (%d test groups).' % nrunners, took)
571 else:
571 else:
572 # If anything went wrong, point out what command to rerun manually to
572 # If anything went wrong, point out what command to rerun manually to
573 # see the actual errors and individual summary
573 # see the actual errors and individual summary
574 failed_sections = [c.section for c in failed]
574 failed_sections = [c.section for c in failed]
575 print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
575 print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
576 nrunners, ', '.join(failed_sections)), took)
576 nrunners, ', '.join(failed_sections)), took)
577 print()
577 print()
578 print('You may wish to rerun these, with:')
578 print('You may wish to rerun these, with:')
579 print(' iptest', *failed_sections)
579 print(' iptest', *failed_sections)
580 print()
580 print()
581
581
582 if options.coverage:
582 if options.coverage:
583 from coverage import coverage
583 from coverage import coverage
584 cov = coverage(data_file='.coverage')
584 cov = coverage(data_file='.coverage')
585 cov.combine()
585 cov.combine()
586 cov.save()
586 cov.save()
587
587
588 # Coverage HTML report
588 # Coverage HTML report
589 if options.coverage == 'html':
589 if options.coverage == 'html':
590 html_dir = 'ipy_htmlcov'
590 html_dir = 'ipy_htmlcov'
591 shutil.rmtree(html_dir, ignore_errors=True)
591 shutil.rmtree(html_dir, ignore_errors=True)
592 print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
592 print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
593 sys.stdout.flush()
593 sys.stdout.flush()
594
594
595 # Custom HTML reporter to clean up module names.
595 # Custom HTML reporter to clean up module names.
596 from coverage.html import HtmlReporter
596 from coverage.html import HtmlReporter
597 class CustomHtmlReporter(HtmlReporter):
597 class CustomHtmlReporter(HtmlReporter):
598 def find_code_units(self, morfs):
598 def find_code_units(self, morfs):
599 super(CustomHtmlReporter, self).find_code_units(morfs)
599 super(CustomHtmlReporter, self).find_code_units(morfs)
600 for cu in self.code_units:
600 for cu in self.code_units:
601 nameparts = cu.name.split(os.sep)
601 nameparts = cu.name.split(os.sep)
602 if 'IPython' not in nameparts:
602 if 'IPython' not in nameparts:
603 continue
603 continue
604 ix = nameparts.index('IPython')
604 ix = nameparts.index('IPython')
605 cu.name = '.'.join(nameparts[ix:])
605 cu.name = '.'.join(nameparts[ix:])
606
606
607 # Reimplement the html_report method with our custom reporter
607 # Reimplement the html_report method with our custom reporter
608 cov._harvest_data()
608 cov._harvest_data()
609 cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
609 cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
610 html_title='IPython test coverage',
610 html_title='IPython test coverage',
611 )
611 )
612 reporter = CustomHtmlReporter(cov, cov.config)
612 reporter = CustomHtmlReporter(cov, cov.config)
613 reporter.report(None)
613 reporter.report(None)
614 print('done.')
614 print('done.')
615
615
616 # Coverage XML report
616 # Coverage XML report
617 elif options.coverage == 'xml':
617 elif options.coverage == 'xml':
618 cov.xml_report(outfile='ipy_coverage.xml')
618 cov.xml_report(outfile='ipy_coverage.xml')
619
619
620 if failed:
620 if failed:
621 # Ensure that our exit code indicates failure
621 # Ensure that our exit code indicates failure
622 sys.exit(1)
622 sys.exit(1)
623
623
624 argparser = argparse.ArgumentParser(description='Run IPython test suite')
624 argparser = argparse.ArgumentParser(description='Run IPython test suite')
625 argparser.add_argument('testgroups', nargs='*',
625 argparser.add_argument('testgroups', nargs='*',
626 help='Run specified groups of tests. If omitted, run '
626 help='Run specified groups of tests. If omitted, run '
627 'all tests.')
627 'all tests.')
628 argparser.add_argument('--all', action='store_true',
628 argparser.add_argument('--all', action='store_true',
629 help='Include slow tests not run by default.')
629 help='Include slow tests not run by default.')
630 argparser.add_argument('--slimerjs', action='store_true',
630 argparser.add_argument('--slimerjs', action='store_true',
631 help="Use slimerjs, if it's installed, instead of phantomjs for casperjs tests.")
631 help="Use slimerjs, if it's installed, instead of phantomjs for casperjs tests.")
632 argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
632 argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
633 help='Run test sections in parallel. This starts as many '
633 help='Run test sections in parallel. This starts as many '
634 'processes as you have cores, or you can specify a number.')
634 'processes as you have cores, or you can specify a number.')
635 argparser.add_argument('--xunit', action='store_true',
635 argparser.add_argument('--xunit', action='store_true',
636 help='Produce Xunit XML results')
636 help='Produce Xunit XML results')
637 argparser.add_argument('--coverage', nargs='?', const=True, default=False,
637 argparser.add_argument('--coverage', nargs='?', const=True, default=False,
638 help="Measure test coverage. Specify 'html' or "
638 help="Measure test coverage. Specify 'html' or "
639 "'xml' to get reports.")
639 "'xml' to get reports.")
640 argparser.add_argument('--subproc-streams', default='capture',
640 argparser.add_argument('--subproc-streams', default='capture',
641 help="What to do with stdout/stderr from subprocesses. "
641 help="What to do with stdout/stderr from subprocesses. "
642 "'capture' (default), 'show' and 'discard' are the options.")
642 "'capture' (default), 'show' and 'discard' are the options.")
643
643
644 def default_options():
644 def default_options():
645 """Get an argparse Namespace object with the default arguments, to pass to
645 """Get an argparse Namespace object with the default arguments, to pass to
646 :func:`run_iptestall`.
646 :func:`run_iptestall`.
647 """
647 """
648 options = argparser.parse_args([])
648 options = argparser.parse_args([])
649 options.extra_args = []
649 options.extra_args = []
650 return options
650 return options
651
651
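A minimal sketch of driving the suite programmatically instead of via the iptest command line, assuming this controller module is importable (module path assumed):

    opts = default_options()
    opts.testgroups = ['core']   # hypothetical section name; an empty list means run all available sections
    opts.xunit = True            # each group then writes <section>.xunit.xml
    run_iptestall(opts)          # exits the process with status 1 if any group failed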
652 def main():
652 def main():
653 # iptest doesn't work correctly if the working directory is the
653 # iptest doesn't work correctly if the working directory is the
654 # root of the IPython source tree. Tell the user, to avoid
654 # root of the IPython source tree. Tell the user, to avoid
655 # frustration.
655 # frustration.
656 if os.path.exists(os.path.join(os.getcwd(),
656 if os.path.exists(os.path.join(os.getcwd(),
657 'IPython', 'testing', '__main__.py')):
657 'IPython', 'testing', '__main__.py')):
658 print("Don't run iptest from the IPython source directory",
658 print("Don't run iptest from the IPython source directory",
659 file=sys.stderr)
659 file=sys.stderr)
660 sys.exit(1)
660 sys.exit(1)
661 # Arguments after -- should be passed through to nose. Argparse treats
661 # Arguments after -- should be passed through to nose. Argparse treats
662 # everything after -- as regular positional arguments, so we separate them
662 # everything after -- as regular positional arguments, so we separate them
663 # first.
663 # first.
664 try:
664 try:
665 ix = sys.argv.index('--')
665 ix = sys.argv.index('--')
666 except ValueError:
666 except ValueError:
667 to_parse = sys.argv[1:]
667 to_parse = sys.argv[1:]
668 extra_args = []
668 extra_args = []
669 else:
669 else:
670 to_parse = sys.argv[1:ix]
670 to_parse = sys.argv[1:ix]
671 extra_args = sys.argv[ix+1:]
671 extra_args = sys.argv[ix+1:]
672
672
673 options = argparser.parse_args(to_parse)
673 options = argparser.parse_args(to_parse)
674 options.extra_args = extra_args
674 options.extra_args = extra_args
675
675
676 run_iptestall(options)
676 run_iptestall(options)
677
677
678
678
679 if __name__ == '__main__':
679 if __name__ == '__main__':
680 main()
680 main()
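To illustrate the '--' handling in main() with assumed arguments: everything after the separator bypasses argparse and lands in options.extra_args, which PyTestController.setup() appends to the nose command line.

    argv = ['iptest', 'IPython.core', '--', '-v', '--with-doctest']   # hypothetical invocation
    ix = argv.index('--')
    to_parse   = argv[1:ix]     # ['IPython.core'] -> parsed by argparser
    extra_args = argv[ix+1:]    # ['-v', '--with-doctest'] -> forwarded to the test subprocesses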
@@ -1,355 +1,355 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
2 # -*- coding: utf-8 -*-
3 """Setup script for IPython.
3 """Setup script for IPython.
4
4
5 Under Posix environments it works like a typical setup.py script.
5 Under Posix environments it works like a typical setup.py script.
6 Under Windows, the command sdist is not supported, since IPython
6 Under Windows, the command sdist is not supported, since IPython
7 requires utilities which are not available under Windows."""
7 requires utilities which are not available under Windows."""
8
8
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Copyright (c) 2008-2011, IPython Development Team.
10 # Copyright (c) 2008-2011, IPython Development Team.
11 # Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
11 # Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
12 # Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
12 # Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
13 # Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
13 # Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
14 #
14 #
15 # Distributed under the terms of the Modified BSD License.
15 # Distributed under the terms of the Modified BSD License.
16 #
16 #
17 # The full license is in the file COPYING.rst, distributed with this software.
17 # The full license is in the file COPYING.rst, distributed with this software.
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Minimal Python version sanity check
21 # Minimal Python version sanity check
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23 from __future__ import print_function
23 from __future__ import print_function
24
24
25 import sys
25 import sys
26
26
27 # This check is also made in IPython/__init__, don't forget to update both when
27 # This check is also made in IPython/__init__, don't forget to update both when
28 # changing Python version requirements.
28 # changing Python version requirements.
29 v = sys.version_info
29 v = sys.version_info
30 if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
30 if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
31 error = "ERROR: IPython requires Python version 2.7 or 3.3 or above."
31 error = "ERROR: IPython requires Python version 2.7 or 3.3 or above."
32 print(error, file=sys.stderr)
32 print(error, file=sys.stderr)
33 sys.exit(1)
33 sys.exit(1)
34
34
35 PY3 = (sys.version_info[0] >= 3)
35 PY3 = (sys.version_info[0] >= 3)
36
36
37 # At least we're on the python version we need, move on.
37 # At least we're on the python version we need, move on.
38
38
39 #-------------------------------------------------------------------------------
39 #-------------------------------------------------------------------------------
40 # Imports
40 # Imports
41 #-------------------------------------------------------------------------------
41 #-------------------------------------------------------------------------------
42
42
43 # Stdlib imports
43 # Stdlib imports
44 import os
44 import os
45 import shutil
45 import shutil
46
46
47 from glob import glob
47 from glob import glob
48
48
49 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
49 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
50 # update it when the contents of directories change.
50 # update it when the contents of directories change.
51 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
51 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
52
52
53 from distutils.core import setup
53 from distutils.core import setup
54
54
55 # Our own imports
55 # Our own imports
56 from setupbase import target_update
56 from setupbase import target_update
57
57
58 from setupbase import (
58 from setupbase import (
59 setup_args,
59 setup_args,
60 find_packages,
60 find_packages,
61 find_package_data,
61 find_package_data,
62 check_package_data_first,
62 check_package_data_first,
63 find_entry_points,
63 find_entry_points,
64 build_scripts_entrypt,
64 build_scripts_entrypt,
65 find_data_files,
65 find_data_files,
66 check_for_dependencies,
66 check_for_dependencies,
67 git_prebuild,
67 git_prebuild,
68 check_submodule_status,
68 check_submodule_status,
69 update_submodules,
69 update_submodules,
70 require_submodules,
70 require_submodules,
71 UpdateSubmodules,
71 UpdateSubmodules,
72 get_bdist_wheel,
72 get_bdist_wheel,
73 CompileCSS,
73 CompileCSS,
74 JavascriptVersion,
74 JavascriptVersion,
75 css_js_prerelease,
75 css_js_prerelease,
76 install_symlinked,
76 install_symlinked,
77 install_lib_symlink,
77 install_lib_symlink,
78 install_scripts_for_symlink,
78 install_scripts_for_symlink,
79 unsymlink,
79 unsymlink,
80 )
80 )
81 from setupext import setupext
81 from setupext import setupext
82
82
83 isfile = os.path.isfile
83 isfile = os.path.isfile
84 pjoin = os.path.join
84 pjoin = os.path.join
85
85
86 #-----------------------------------------------------------------------------
86 #-----------------------------------------------------------------------------
87 # Function definitions
87 # Function definitions
88 #-----------------------------------------------------------------------------
88 #-----------------------------------------------------------------------------
89
89
90 def cleanup():
90 def cleanup():
91 """Clean up the junk left around by the build process"""
91 """Clean up the junk left around by the build process"""
92 if "develop" not in sys.argv and "egg_info" not in sys.argv:
92 if "develop" not in sys.argv and "egg_info" not in sys.argv:
93 try:
93 try:
94 shutil.rmtree('ipython.egg-info')
94 shutil.rmtree('ipython.egg-info')
95 except:
95 except:
96 try:
96 try:
97 os.unlink('ipython.egg-info')
97 os.unlink('ipython.egg-info')
98 except:
98 except:
99 pass
99 pass
100
100
101 #-------------------------------------------------------------------------------
101 #-------------------------------------------------------------------------------
102 # Handle OS specific things
102 # Handle OS specific things
103 #-------------------------------------------------------------------------------
103 #-------------------------------------------------------------------------------
104
104
105 if os.name in ('nt','dos'):
105 if os.name in ('nt','dos'):
106 os_name = 'windows'
106 os_name = 'windows'
107 else:
107 else:
108 os_name = os.name
108 os_name = os.name
109
109
110 # Under Windows, 'sdist' has not been supported. Now that the docs build with
110 # Under Windows, 'sdist' has not been supported. Now that the docs build with
111 # Sphinx it might work, but let's not turn it on until someone confirms that it
111 # Sphinx it might work, but let's not turn it on until someone confirms that it
112 # actually works.
112 # actually works.
113 if os_name == 'windows' and 'sdist' in sys.argv:
113 if os_name == 'windows' and 'sdist' in sys.argv:
114 print('The sdist command is not available under Windows. Exiting.')
114 print('The sdist command is not available under Windows. Exiting.')
115 sys.exit(1)
115 sys.exit(1)
116
116
117 #-------------------------------------------------------------------------------
117 #-------------------------------------------------------------------------------
118 # Make sure we aren't trying to run without submodules
118 # Make sure we aren't trying to run without submodules
119 #-------------------------------------------------------------------------------
119 #-------------------------------------------------------------------------------
120 here = os.path.abspath(os.path.dirname(__file__))
120 here = os.path.abspath(os.path.dirname(__file__))
121
121
122 def require_clean_submodules():
122 def require_clean_submodules():
123 """Check on git submodules before distutils can do anything
123 """Check on git submodules before distutils can do anything
124
124
125 Since distutils cannot be trusted to update the tree
125 Since distutils cannot be trusted to update the tree
126 after everything has been set in motion,
126 after everything has been set in motion,
127 this is not a distutils command.
127 this is not a distutils command.
128 """
128 """
129 # PACKAGERS: Add a return here to skip checks for git submodules
129 # PACKAGERS: Add a return here to skip checks for git submodules
130
130
131 # don't do anything if nothing is actually supposed to happen
131 # don't do anything if nothing is actually supposed to happen
132 for do_nothing in ('-h', '--help', '--help-commands', 'clean', 'submodule'):
132 for do_nothing in ('-h', '--help', '--help-commands', 'clean', 'submodule'):
133 if do_nothing in sys.argv:
133 if do_nothing in sys.argv:
134 return
134 return
135
135
136 status = check_submodule_status(here)
136 status = check_submodule_status(here)
137
137
138 if status == "missing":
138 if status == "missing":
139 print("checking out submodules for the first time")
139 print("checking out submodules for the first time")
140 update_submodules(here)
140 update_submodules(here)
141 elif status == "unclean":
141 elif status == "unclean":
142 print('\n'.join([
142 print('\n'.join([
143 "Cannot build / install IPython with unclean submodules",
143 "Cannot build / install IPython with unclean submodules",
144 "Please update submodules with",
144 "Please update submodules with",
145 " python setup.py submodule",
145 " python setup.py submodule",
146 "or",
146 "or",
147 " git submodule update",
147 " git submodule update",
148 "or commit any submodule changes you have made."
148 "or commit any submodule changes you have made."
149 ]))
149 ]))
150 sys.exit(1)
150 sys.exit(1)
151
151
152 require_clean_submodules()
152 require_clean_submodules()
153
153
154 #-------------------------------------------------------------------------------
154 #-------------------------------------------------------------------------------
155 # Things related to the IPython documentation
155 # Things related to the IPython documentation
156 #-------------------------------------------------------------------------------
156 #-------------------------------------------------------------------------------
157
157
158 # update the manuals when building a source dist
158 # update the manuals when building a source dist
159 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
159 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
160
160
161 # List of things to be updated. Each entry is a triplet of args for
161 # List of things to be updated. Each entry is a triplet of args for
162 # target_update()
162 # target_update()
163 to_update = [
163 to_update = [
164 # FIXME - Disabled for now: we need to redo an automatic way
164 # FIXME - Disabled for now: we need to redo an automatic way
165 # of generating the magic info inside the rst.
165 # of generating the magic info inside the rst.
166 #('docs/magic.tex',
166 #('docs/magic.tex',
167 #['IPython/Magic.py'],
167 #['IPython/Magic.py'],
168 #"cd doc && ./update_magic.sh" ),
168 #"cd doc && ./update_magic.sh" ),
169
169
170 ('docs/man/ipcluster.1.gz',
170 ('docs/man/ipcluster.1.gz',
171 ['docs/man/ipcluster.1'],
171 ['docs/man/ipcluster.1'],
172 'cd docs/man && gzip -9c ipcluster.1 > ipcluster.1.gz'),
172 'cd docs/man && gzip -9c ipcluster.1 > ipcluster.1.gz'),
173
173
174 ('docs/man/ipcontroller.1.gz',
174 ('docs/man/ipcontroller.1.gz',
175 ['docs/man/ipcontroller.1'],
175 ['docs/man/ipcontroller.1'],
176 'cd docs/man && gzip -9c ipcontroller.1 > ipcontroller.1.gz'),
176 'cd docs/man && gzip -9c ipcontroller.1 > ipcontroller.1.gz'),
177
177
178 ('docs/man/ipengine.1.gz',
178 ('docs/man/ipengine.1.gz',
179 ['docs/man/ipengine.1'],
179 ['docs/man/ipengine.1'],
180 'cd docs/man && gzip -9c ipengine.1 > ipengine.1.gz'),
180 'cd docs/man && gzip -9c ipengine.1 > ipengine.1.gz'),
181
181
182 ('docs/man/ipython.1.gz',
182 ('docs/man/ipython.1.gz',
183 ['docs/man/ipython.1'],
183 ['docs/man/ipython.1'],
184 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'),
184 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'),
185
185
186 ]
186 ]
187
187
188
188
189 [ target_update(*t) for t in to_update ]
189 [ target_update(*t) for t in to_update ]
190
190
191 #---------------------------------------------------------------------------
191 #---------------------------------------------------------------------------
192 # Find all the packages, package data, and data_files
192 # Find all the packages, package data, and data_files
193 #---------------------------------------------------------------------------
193 #---------------------------------------------------------------------------
194
194
195 packages = find_packages()
195 packages = find_packages()
196 package_data = find_package_data()
196 package_data = find_package_data()
197
197
198 data_files = find_data_files()
198 data_files = find_data_files()
199
199
200 setup_args['packages'] = packages
200 setup_args['packages'] = packages
201 setup_args['package_data'] = package_data
201 setup_args['package_data'] = package_data
202 setup_args['data_files'] = data_files
202 setup_args['data_files'] = data_files
203
203
204 #---------------------------------------------------------------------------
204 #---------------------------------------------------------------------------
205 # custom distutils commands
205 # custom distutils commands
206 #---------------------------------------------------------------------------
206 #---------------------------------------------------------------------------
207 # imports here, so they are after setuptools import if there was one
207 # imports here, so they are after setuptools import if there was one
208 from distutils.command.sdist import sdist
208 from distutils.command.sdist import sdist
209 from distutils.command.upload import upload
209 from distutils.command.upload import upload
210
210
211 class UploadWindowsInstallers(upload):
211 class UploadWindowsInstallers(upload):
212
212
213 description = "Upload Windows installers to PyPI (only used from tools/release_windows.py)"
213 description = "Upload Windows installers to PyPI (only used from tools/release_windows.py)"
214 user_options = upload.user_options + [
214 user_options = upload.user_options + [
215 ('files=', 'f', 'exe file (or glob) to upload')
215 ('files=', 'f', 'exe file (or glob) to upload')
216 ]
216 ]
217 def initialize_options(self):
217 def initialize_options(self):
218 upload.initialize_options(self)
218 upload.initialize_options(self)
219 meta = self.distribution.metadata
219 meta = self.distribution.metadata
220 base = '{name}-{version}'.format(
220 base = '{name}-{version}'.format(
221 name=meta.get_name(),
221 name=meta.get_name(),
222 version=meta.get_version()
222 version=meta.get_version()
223 )
223 )
224 self.files = os.path.join('dist', '%s.*.exe' % base)
224 self.files = os.path.join('dist', '%s.*.exe' % base)
225
225
226 def run(self):
226 def run(self):
227 for dist_file in glob(self.files):
227 for dist_file in glob(self.files):
228 self.upload_file('bdist_wininst', 'any', dist_file)
228 self.upload_file('bdist_wininst', 'any', dist_file)
229
229
230 setup_args['cmdclass'] = {
230 setup_args['cmdclass'] = {
231 'build_py': css_js_prerelease(
231 'build_py': css_js_prerelease(
232 check_package_data_first(git_prebuild('IPython')),
232 check_package_data_first(git_prebuild('IPython')),
233 strict=False),
233 strict=False),
234 'sdist' : css_js_prerelease(git_prebuild('IPython', sdist)),
234 'sdist' : css_js_prerelease(git_prebuild('IPython', sdist)),
235 'upload_wininst' : UploadWindowsInstallers,
235 'upload_wininst' : UploadWindowsInstallers,
236 'submodule' : UpdateSubmodules,
236 'submodule' : UpdateSubmodules,
237 'css' : CompileCSS,
237 'css' : CompileCSS,
238 'symlink': install_symlinked,
238 'symlink': install_symlinked,
239 'install_lib_symlink': install_lib_symlink,
239 'install_lib_symlink': install_lib_symlink,
240 'install_scripts_sym': install_scripts_for_symlink,
240 'install_scripts_sym': install_scripts_for_symlink,
241 'unsymlink': unsymlink,
241 'unsymlink': unsymlink,
242 'jsversion' : JavascriptVersion,
242 'jsversion' : JavascriptVersion,
243 }
243 }
244
244
245 #---------------------------------------------------------------------------
245 #---------------------------------------------------------------------------
246 # Handle scripts, dependencies, and setuptools specific things
246 # Handle scripts, dependencies, and setuptools specific things
247 #---------------------------------------------------------------------------
247 #---------------------------------------------------------------------------
248
248
249 # For some commands, use setuptools. Note that we do NOT list install here!
249 # For some commands, use setuptools. Note that we do NOT list install here!
250 # If you want a setuptools-enhanced install, just run 'setupegg.py install'
250 # If you want a setuptools-enhanced install, just run 'setupegg.py install'
251 needs_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm',
251 needs_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm',
252 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel',
252 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel',
253 'egg_info', 'easy_install', 'upload', 'install_egg_info',
253 'egg_info', 'easy_install', 'upload', 'install_egg_info',
254 ))
254 ))
255 if sys.platform == 'win32':
255 if sys.platform == 'win32':
256 # Depend on setuptools for install on *Windows only*
256 # Depend on setuptools for install on *Windows only*
257 # If we get script-installation working without setuptools,
257 # If we get script-installation working without setuptools,
258 # then we can back off, but until then use it.
258 # then we can back off, but until then use it.
259 # See Issue #369 on GitHub for more
259 # See Issue #369 on GitHub for more
260 needs_setuptools.add('install')
260 needs_setuptools.add('install')
261
261
262 if len(needs_setuptools.intersection(sys.argv)) > 0:
262 if len(needs_setuptools.intersection(sys.argv)) > 0:
263 import setuptools
263 import setuptools
264
264
265 # This dict is used for passing extra arguments that are setuptools
265 # This dict is used for passing extra arguments that are setuptools
266 # specific to setup
266 # specific to setup
267 setuptools_extra_args = {}
267 setuptools_extra_args = {}
268
268
269 # setuptools requirements
269 # setuptools requirements
270
270
271 extras_require = dict(
271 extras_require = dict(
272 parallel = ['pyzmq>=2.1.11'],
272 parallel = ['pyzmq>=2.1.11'],
273 qtconsole = ['pyzmq>=2.1.11', 'pygments'],
273 qtconsole = ['pyzmq>=2.1.11', 'pygments'],
274 zmq = ['pyzmq>=2.1.11'],
274 zmq = ['pyzmq>=2.1.11'],
275 doc = ['Sphinx>=1.1', 'numpydoc'],
275 doc = ['Sphinx>=1.1', 'numpydoc'],
276 test = ['nose>=0.10.1'],
276 test = ['nose>=0.10.1'],
277 terminal = [],
277 terminal = [],
278 nbformat = ['jsonschema>=2.0', 'jsonpointer>=1.3'],
278 nbformat = ['jsonschema>=2.0'],
279 notebook = ['tornado>=3.1', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1'],
279 notebook = ['tornado>=3.1', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1'],
280 nbconvert = ['pygments', 'jinja2', 'mistune>=0.3.1']
280 nbconvert = ['pygments', 'jinja2', 'mistune>=0.3.1']
281 )
281 )
282
282
283 if sys.version_info < (3, 3):
283 if sys.version_info < (3, 3):
284 extras_require['test'].append('mock')
284 extras_require['test'].append('mock')
285
285
286 extras_require['notebook'].extend(extras_require['nbformat'])
286 extras_require['notebook'].extend(extras_require['nbformat'])
287 extras_require['nbconvert'].extend(extras_require['nbformat'])
287 extras_require['nbconvert'].extend(extras_require['nbformat'])
288
288
289 everything = set()
289 everything = set()
290 for deps in extras_require.values():
290 for deps in extras_require.values():
291 everything.update(deps)
291 everything.update(deps)
292 extras_require['all'] = everything
292 extras_require['all'] = everything
293
293
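To make the extras bookkeeping above concrete (values taken from the dict defined in this file): after the two extend() calls, the 'notebook' extra also carries the nbformat requirement, and the synthetic 'all' extra is the union of every other extra.

    extras_require['notebook']
    # -> ['tornado>=3.1', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1', 'jsonschema>=2.0']
    extras_require['all']
    # -> the union of every extra above; with setuptools this is what 'pip install ipython[all]' resolves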
294 install_requires = []
294 install_requires = []
295
295
296 # add readline
296 # add readline
297 if sys.platform == 'darwin':
297 if sys.platform == 'darwin':
298 if any(arg.startswith('bdist') for arg in sys.argv) or not setupext.check_for_readline():
298 if any(arg.startswith('bdist') for arg in sys.argv) or not setupext.check_for_readline():
299 install_requires.append('gnureadline')
299 install_requires.append('gnureadline')
300 elif sys.platform.startswith('win'):
300 elif sys.platform.startswith('win'):
301 extras_require['terminal'].append('pyreadline>=2.0')
301 extras_require['terminal'].append('pyreadline>=2.0')
302
302
303
303
304 if 'setuptools' in sys.modules:
304 if 'setuptools' in sys.modules:
305 # setup.py develop should check for submodules
305 # setup.py develop should check for submodules
306 from setuptools.command.develop import develop
306 from setuptools.command.develop import develop
307 setup_args['cmdclass']['develop'] = require_submodules(develop)
307 setup_args['cmdclass']['develop'] = require_submodules(develop)
308 setup_args['cmdclass']['bdist_wheel'] = css_js_prerelease(get_bdist_wheel())
308 setup_args['cmdclass']['bdist_wheel'] = css_js_prerelease(get_bdist_wheel())
309
309
310 setuptools_extra_args['zip_safe'] = False
310 setuptools_extra_args['zip_safe'] = False
311 setuptools_extra_args['entry_points'] = {'console_scripts':find_entry_points()}
311 setuptools_extra_args['entry_points'] = {'console_scripts':find_entry_points()}
312 setup_args['extras_require'] = extras_require
312 setup_args['extras_require'] = extras_require
313 requires = setup_args['install_requires'] = install_requires
313 requires = setup_args['install_requires'] = install_requires
314
314
315 # Script to be run by the windows binary installer after the default setup
315 # Script to be run by the windows binary installer after the default setup
316 # routine, to add shortcuts and similar windows-only things. Windows
316 # routine, to add shortcuts and similar windows-only things. Windows
317 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
317 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
318 # doesn't find them.
318 # doesn't find them.
319 if 'bdist_wininst' in sys.argv:
319 if 'bdist_wininst' in sys.argv:
320 if len(sys.argv) > 2 and \
320 if len(sys.argv) > 2 and \
321 ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
321 ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
322 print >> sys.stderr, "ERROR: bdist_wininst must be run alone. Exiting."
322 print >> sys.stderr, "ERROR: bdist_wininst must be run alone. Exiting."
323 sys.exit(1)
323 sys.exit(1)
324 setup_args['data_files'].append(
324 setup_args['data_files'].append(
325 ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')])
325 ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')])
326 setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')]
326 setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')]
327 setup_args['options'] = {"bdist_wininst":
327 setup_args['options'] = {"bdist_wininst":
328 {"install_script":
328 {"install_script":
329 "ipython_win_post_install.py"}}
329 "ipython_win_post_install.py"}}
330
330
331 else:
331 else:
332 # If we are installing without setuptools, call this function which will
332 # If we are installing without setuptools, call this function which will
333 # check for dependencies and inform the user what is needed. This is
333 # check for dependencies and inform the user what is needed. This is
334 # just to make life easy for users.
334 # just to make life easy for users.
335 for install_cmd in ('install', 'symlink'):
335 for install_cmd in ('install', 'symlink'):
336 if install_cmd in sys.argv:
336 if install_cmd in sys.argv:
337 check_for_dependencies()
337 check_for_dependencies()
338 break
338 break
339 # scripts has to be a non-empty list, or install_scripts isn't called
339 # scripts has to be a non-empty list, or install_scripts isn't called
340 setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()]
340 setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()]
341
341
342 setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt
342 setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt
343
343
344 #---------------------------------------------------------------------------
344 #---------------------------------------------------------------------------
345 # Do the actual setup now
345 # Do the actual setup now
346 #---------------------------------------------------------------------------
346 #---------------------------------------------------------------------------
347
347
348 setup_args.update(setuptools_extra_args)
348 setup_args.update(setuptools_extra_args)
349
349
350 def main():
350 def main():
351 setup(**setup_args)
351 setup(**setup_args)
352 cleanup()
352 cleanup()
353
353
354 if __name__ == '__main__':
354 if __name__ == '__main__':
355 main()
355 main()
@@ -1,730 +1,733 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 This module defines the things that are used in setup.py for building IPython
3 This module defines the things that are used in setup.py for building IPython
4
4
5 This includes:
5 This includes:
6
6
7 * The basic arguments to setup
7 * The basic arguments to setup
8 * Functions for finding things like packages, package data, etc.
8 * Functions for finding things like packages, package data, etc.
9 * A function for checking dependencies.
9 * A function for checking dependencies.
10 """
10 """
11
11
12 # Copyright (c) IPython Development Team.
12 # Copyright (c) IPython Development Team.
13 # Distributed under the terms of the Modified BSD License.
13 # Distributed under the terms of the Modified BSD License.
14
14
15 from __future__ import print_function
15 from __future__ import print_function
16
16
17 import errno
17 import errno
18 import os
18 import os
19 import sys
19 import sys
20
20
21 from distutils import log
21 from distutils import log
22 from distutils.command.build_py import build_py
22 from distutils.command.build_py import build_py
23 from distutils.command.build_scripts import build_scripts
23 from distutils.command.build_scripts import build_scripts
24 from distutils.command.install import install
24 from distutils.command.install import install
25 from distutils.command.install_scripts import install_scripts
25 from distutils.command.install_scripts import install_scripts
26 from distutils.cmd import Command
26 from distutils.cmd import Command
27 from fnmatch import fnmatch
27 from fnmatch import fnmatch
28 from glob import glob
28 from glob import glob
29 from subprocess import check_call
29 from subprocess import check_call
30
30
31 from setupext import install_data_ext
31 from setupext import install_data_ext
32
32
33 #-------------------------------------------------------------------------------
33 #-------------------------------------------------------------------------------
34 # Useful globals and utility functions
34 # Useful globals and utility functions
35 #-------------------------------------------------------------------------------
35 #-------------------------------------------------------------------------------
36
36
37 # A few handy globals
37 # A few handy globals
38 isfile = os.path.isfile
38 isfile = os.path.isfile
39 pjoin = os.path.join
39 pjoin = os.path.join
40 repo_root = os.path.dirname(os.path.abspath(__file__))
40 repo_root = os.path.dirname(os.path.abspath(__file__))
41
41
42 def oscmd(s):
42 def oscmd(s):
43 print(">", s)
43 print(">", s)
44 os.system(s)
44 os.system(s)
45
45
46 # Py3 compatibility hacks, without assuming IPython itself is installed with
46 # Py3 compatibility hacks, without assuming IPython itself is installed with
47 # the full py3compat machinery.
47 # the full py3compat machinery.
48
48
49 try:
49 try:
50 execfile
50 execfile
51 except NameError:
51 except NameError:
52 def execfile(fname, globs, locs=None):
52 def execfile(fname, globs, locs=None):
53 locs = locs or globs
53 locs = locs or globs
54 exec(compile(open(fname).read(), fname, "exec"), globs, locs)
54 exec(compile(open(fname).read(), fname, "exec"), globs, locs)
55
55
56 # A little utility we'll need below, since glob() does NOT allow you to do
56 # A little utility we'll need below, since glob() does NOT allow you to do
57 # exclusion on multiple endings!
57 # exclusion on multiple endings!
58 def file_doesnt_endwith(test,endings):
58 def file_doesnt_endwith(test,endings):
59 """Return true if test is a file and its name does NOT end with any
59 """Return true if test is a file and its name does NOT end with any
60 of the strings listed in endings."""
60 of the strings listed in endings."""
61 if not isfile(test):
61 if not isfile(test):
62 return False
62 return False
63 for e in endings:
63 for e in endings:
64 if test.endswith(e):
64 if test.endswith(e):
65 return False
65 return False
66 return True
66 return True
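# Editor's sketch (illustrative, not part of this changeset): typical use of
# the helper above together with glob(); the directory and endings here are
# hypothetical.
#
#   candidates = glob(pjoin('scripts', '*'))
#   scripts = [f for f in candidates
#              if file_doesnt_endwith(f, ('.pyc', '.pyo', '~'))]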
67
67
68 #---------------------------------------------------------------------------
68 #---------------------------------------------------------------------------
69 # Basic project information
69 # Basic project information
70 #---------------------------------------------------------------------------
70 #---------------------------------------------------------------------------
71
71
72 # release.py contains version, authors, license, url, keywords, etc.
72 # release.py contains version, authors, license, url, keywords, etc.
73 execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())
73 execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())
74
74
75 # Create a dict with the basic information
75 # Create a dict with the basic information
76 # This dict is eventually passed to setup after additional keys are added.
76 # This dict is eventually passed to setup after additional keys are added.
77 setup_args = dict(
77 setup_args = dict(
78 name = name,
78 name = name,
79 version = version,
79 version = version,
80 description = description,
80 description = description,
81 long_description = long_description,
81 long_description = long_description,
82 author = author,
82 author = author,
83 author_email = author_email,
83 author_email = author_email,
84 url = url,
84 url = url,
85 download_url = download_url,
85 download_url = download_url,
86 license = license,
86 license = license,
87 platforms = platforms,
87 platforms = platforms,
88 keywords = keywords,
88 keywords = keywords,
89 classifiers = classifiers,
89 classifiers = classifiers,
90 cmdclass = {'install_data': install_data_ext},
90 cmdclass = {'install_data': install_data_ext},
91 )
91 )
92
92
93
93
94 #---------------------------------------------------------------------------
94 #---------------------------------------------------------------------------
95 # Find packages
95 # Find packages
96 #---------------------------------------------------------------------------
96 #---------------------------------------------------------------------------
97
97
98 def find_packages():
98 def find_packages():
99 """
99 """
100 Find all of IPython's packages.
100 Find all of IPython's packages.
101 """
101 """
102 excludes = ['deathrow', 'quarantine']
102 excludes = ['deathrow', 'quarantine']
103 packages = []
103 packages = []
104 for dir,subdirs,files in os.walk('IPython'):
104 for dir,subdirs,files in os.walk('IPython'):
105 package = dir.replace(os.path.sep, '.')
105 package = dir.replace(os.path.sep, '.')
106 if any(package.startswith('IPython.'+exc) for exc in excludes):
106 if any(package.startswith('IPython.'+exc) for exc in excludes):
107 # package is to be excluded (e.g. deathrow)
107 # package is to be excluded (e.g. deathrow)
108 continue
108 continue
109 if '__init__.py' not in files:
109 if '__init__.py' not in files:
110 # not a package
110 # not a package
111 continue
111 continue
112 packages.append(package)
112 packages.append(package)
113 return packages
113 return packages
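# Editor's sketch (illustrative, not part of this changeset): how a directory
# visited by os.walk() maps to a dotted package name in find_packages() above.
import os
_d = os.path.join('IPython', 'core', 'tests')
assert _d.replace(os.path.sep, '.') == 'IPython.core.tests'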
114
114
115 #---------------------------------------------------------------------------
115 #---------------------------------------------------------------------------
116 # Find package data
116 # Find package data
117 #---------------------------------------------------------------------------
117 #---------------------------------------------------------------------------
118
118
119 def find_package_data():
119 def find_package_data():
120 """
120 """
121 Find IPython's package_data.
121 Find IPython's package_data.
122 """
122 """
123 # This is not enough for these things to appear in an sdist.
123 # This is not enough for these things to appear in an sdist.
124 # We need to muck with the MANIFEST to get this to work
124 # We need to muck with the MANIFEST to get this to work
125
125
126 # exclude components and less from the walk;
126 # exclude components and less from the walk;
127 # we will build the components separately
127 # we will build the components separately
128 excludes = [
128 excludes = [
129 pjoin('static', 'components'),
129 pjoin('static', 'components'),
130 pjoin('static', '*', 'less'),
130 pjoin('static', '*', 'less'),
131 ]
131 ]
132
132
133 # walk notebook resources:
133 # walk notebook resources:
134 cwd = os.getcwd()
134 cwd = os.getcwd()
135 os.chdir(os.path.join('IPython', 'html'))
135 os.chdir(os.path.join('IPython', 'html'))
136 static_data = []
136 static_data = []
137 for parent, dirs, files in os.walk('static'):
137 for parent, dirs, files in os.walk('static'):
138 if any(fnmatch(parent, pat) for pat in excludes):
138 if any(fnmatch(parent, pat) for pat in excludes):
139 # prevent descending into subdirs
139 # prevent descending into subdirs
140 dirs[:] = []
140 dirs[:] = []
141 continue
141 continue
142 for f in files:
142 for f in files:
143 static_data.append(pjoin(parent, f))
143 static_data.append(pjoin(parent, f))
144
144
145 components = pjoin("static", "components")
145 components = pjoin("static", "components")
146 # select the components we actually need to install
146 # select the components we actually need to install
147 # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
147 # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
148 static_data.extend([
148 static_data.extend([
149 pjoin(components, "backbone", "backbone-min.js"),
149 pjoin(components, "backbone", "backbone-min.js"),
150 pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
150 pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
151 pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
151 pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
152 pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
152 pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
153 pjoin(components, "font-awesome", "fonts", "*.*"),
153 pjoin(components, "font-awesome", "fonts", "*.*"),
154 pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
154 pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
155 pjoin(components, "highlight.js", "build", "highlight.pack.js"),
155 pjoin(components, "highlight.js", "build", "highlight.pack.js"),
156 pjoin(components, "jquery", "jquery.min.js"),
156 pjoin(components, "jquery", "jquery.min.js"),
157 pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
157 pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
158 pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
158 pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
159 pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
159 pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
160 pjoin(components, "marked", "lib", "marked.js"),
160 pjoin(components, "marked", "lib", "marked.js"),
161 pjoin(components, "requirejs", "require.js"),
161 pjoin(components, "requirejs", "require.js"),
162 pjoin(components, "underscore", "underscore-min.js"),
162 pjoin(components, "underscore", "underscore-min.js"),
163 pjoin(components, "moment", "moment.js"),
163 pjoin(components, "moment", "moment.js"),
164 pjoin(components, "moment", "min","moment.min.js"),
164 pjoin(components, "moment", "min","moment.min.js"),
165 ])
165 ])
166
166
167 # Ship all of Codemirror's CSS and JS
167 # Ship all of Codemirror's CSS and JS
168 for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
168 for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
169 for f in files:
169 for f in files:
170 if f.endswith(('.js', '.css')):
170 if f.endswith(('.js', '.css')):
171 static_data.append(pjoin(parent, f))
171 static_data.append(pjoin(parent, f))
172
172
173 os.chdir(os.path.join('tests',))
173 os.chdir(os.path.join('tests',))
174 js_tests = glob('*.js') + glob('*/*.js')
174 js_tests = glob('*.js') + glob('*/*.js')
175
175
176 os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
176 os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
177 nbconvert_templates = [os.path.join(dirpath, '*.*')
177 nbconvert_templates = [os.path.join(dirpath, '*.*')
178 for dirpath, _, _ in os.walk('templates')]
178 for dirpath, _, _ in os.walk('templates')]
179
179
180 os.chdir(cwd)
180 os.chdir(cwd)
181
181
182 package_data = {
182 package_data = {
183 'IPython.config.profile' : ['README*', '*/*.py'],
183 'IPython.config.profile' : ['README*', '*/*.py'],
184 'IPython.core.tests' : ['*.png', '*.jpg'],
184 'IPython.core.tests' : ['*.png', '*.jpg'],
185 'IPython.lib.tests' : ['*.wav'],
185 'IPython.lib.tests' : ['*.wav'],
186 'IPython.testing.plugin' : ['*.txt'],
186 'IPython.testing.plugin' : ['*.txt'],
187 'IPython.html' : ['templates/*'] + static_data,
187 'IPython.html' : ['templates/*'] + static_data,
188 'IPython.html.tests' : js_tests,
188 'IPython.html.tests' : js_tests,
189 'IPython.qt.console' : ['resources/icon/*.svg'],
189 'IPython.qt.console' : ['resources/icon/*.svg'],
190 'IPython.nbconvert' : nbconvert_templates +
190 'IPython.nbconvert' : nbconvert_templates +
191 [
191 [
192 'tests/files/*.*',
192 'tests/files/*.*',
193 'exporters/tests/files/*.*',
193 'exporters/tests/files/*.*',
194 'preprocessors/tests/files/*.*',
194 'preprocessors/tests/files/*.*',
195 ],
195 ],
196 'IPython.nbconvert.filters' : ['marked.js'],
196 'IPython.nbconvert.filters' : ['marked.js'],
197 'IPython.nbformat' : ['tests/*.ipynb','v3/v3.withref.json']
197 'IPython.nbformat' : [
198 'tests/*.ipynb',
199 'v3/nbformat.v3.schema.json',
200 ]
198 }
201 }
199
202
200 return package_data
203 return package_data
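# Editor's sketch (illustrative, not part of this changeset): the excludes
# above rely on fnmatch() matching the whole relative directory path, so a
# pattern like 'static/*/less' skips 'static/notebook/less' but keeps
# 'static/notebook/js'.
from fnmatch import fnmatch
assert fnmatch('static/notebook/less', 'static/*/less')
assert not fnmatch('static/notebook/js', 'static/*/less')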
201
204
202
205
203 def check_package_data(package_data):
206 def check_package_data(package_data):
204 """verify that package_data globs make sense"""
207 """verify that package_data globs make sense"""
205 print("checking package data")
208 print("checking package data")
206 for pkg, data in package_data.items():
209 for pkg, data in package_data.items():
207 pkg_root = pjoin(*pkg.split('.'))
210 pkg_root = pjoin(*pkg.split('.'))
208 for d in data:
211 for d in data:
209 path = pjoin(pkg_root, d)
212 path = pjoin(pkg_root, d)
210 if '*' in path:
213 if '*' in path:
211 assert len(glob(path)) > 0, "No files match pattern %s" % path
214 assert len(glob(path)) > 0, "No files match pattern %s" % path
212 else:
215 else:
213 assert os.path.exists(path), "Missing package data: %s" % path
216 assert os.path.exists(path), "Missing package data: %s" % path
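# Editor's sketch (illustrative, not part of this changeset): calling the
# checker directly on a single entry; it asserts that at least one file
# matches each glob, resolved relative to the package's directory.
#
#   check_package_data({'IPython.core.tests': ['*.png', '*.jpg']})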
214
217
215
218
216 def check_package_data_first(command):
219 def check_package_data_first(command):
217 """decorator for checking package_data before running a given command
220 """decorator for checking package_data before running a given command
218
221
219 Probably only needs to wrap build_py
222 Probably only needs to wrap build_py
220 """
223 """
221 class DecoratedCommand(command):
224 class DecoratedCommand(command):
222 def run(self):
225 def run(self):
223 check_package_data(self.package_data)
226 check_package_data(self.package_data)
224 command.run(self)
227 command.run(self)
225 return DecoratedCommand
228 return DecoratedCommand
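# Editor's sketch (illustrative, not part of this changeset): how this
# decorator can be applied in setup.py; the exact composition with the other
# command wrappers is assumed, following the pattern visible at the top of
# this changeset.
#
#   setup_args['cmdclass']['build_py'] = check_package_data_first(build_py)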
226
229
227
230
228 #---------------------------------------------------------------------------
231 #---------------------------------------------------------------------------
229 # Find data files
232 # Find data files
230 #---------------------------------------------------------------------------
233 #---------------------------------------------------------------------------
231
234
232 def make_dir_struct(tag,base,out_base):
235 def make_dir_struct(tag,base,out_base):
233 """Make the directory structure of all files below a starting dir.
236 """Make the directory structure of all files below a starting dir.
234
237
235 This is just a convenience routine to help build a nested directory
238 This is just a convenience routine to help build a nested directory
236 hierarchy because distutils is too stupid to do this by itself.
239 hierarchy because distutils is too stupid to do this by itself.
237
240
238 XXX - this needs a proper docstring!
241 XXX - this needs a proper docstring!
239 """
242 """
240
243
241 # we'll use these a lot below
244 # we'll use these a lot below
242 lbase = len(base)
245 lbase = len(base)
243 pathsep = os.path.sep
246 pathsep = os.path.sep
244 lpathsep = len(pathsep)
247 lpathsep = len(pathsep)
245
248
246 out = []
249 out = []
247 for (dirpath,dirnames,filenames) in os.walk(base):
250 for (dirpath,dirnames,filenames) in os.walk(base):
248 # we need to strip out the dirpath from the base to map it to the
251 # we need to strip out the dirpath from the base to map it to the
249 # output (installation) path. This requires possibly stripping the
252 # output (installation) path. This requires possibly stripping the
250 # path separator, because otherwise pjoin will not work correctly
253 # path separator, because otherwise pjoin will not work correctly
251 # (pjoin('foo/','/bar') returns '/bar').
254 # (pjoin('foo/','/bar') returns '/bar').
252
255
253 dp_eff = dirpath[lbase:]
256 dp_eff = dirpath[lbase:]
254 if dp_eff.startswith(pathsep):
257 if dp_eff.startswith(pathsep):
255 dp_eff = dp_eff[lpathsep:]
258 dp_eff = dp_eff[lpathsep:]
256 # The output path must be anchored at the out_base marker
259 # The output path must be anchored at the out_base marker
257 out_path = pjoin(out_base,dp_eff)
260 out_path = pjoin(out_base,dp_eff)
258 # Now we can generate the final filenames. Since os.walk only produces
261 # Now we can generate the final filenames. Since os.walk only produces
259 # filenames, we must join back with the dirpath to get full valid file
262 # filenames, we must join back with the dirpath to get full valid file
260 # paths:
263 # paths:
261 pfiles = [pjoin(dirpath,f) for f in filenames]
264 pfiles = [pjoin(dirpath,f) for f in filenames]
262 # Finally, generate the entry we need, which is a pair of (output
265 # Finally, generate the entry we need, which is a pair of (output
263 # path, files) for use as a data_files parameter in install_data.
266 # path, files) for use as a data_files parameter in install_data.
264 out.append((out_path, pfiles))
267 out.append((out_path, pfiles))
265
268
266 return out
269 return out
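# Editor's sketch (illustrative, not part of this changeset): the return value
# is a list of (install_dir, [source files]) pairs suitable for data_files,
# e.g. (paths hypothetical):
#
#   [('share/doc/ipython', ['docs/README.rst']),
#    ('share/doc/ipython/examples', ['docs/examples/demo.py'])]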
267
270
268
271
269 def find_data_files():
272 def find_data_files():
270 """
273 """
271 Find IPython's data_files.
274 Find IPython's data_files.
272
275
273 Just man pages at this point.
276 Just man pages at this point.
274 """
277 """
275
278
276 manpagebase = pjoin('share', 'man', 'man1')
279 manpagebase = pjoin('share', 'man', 'man1')
277
280
278 # Simple file lists can be made by hand
281 # Simple file lists can be made by hand
279 manpages = [f for f in glob(pjoin('docs','man','*.1.gz')) if isfile(f)]
282 manpages = [f for f in glob(pjoin('docs','man','*.1.gz')) if isfile(f)]
280 if not manpages:
283 if not manpages:
281 # When running from a source tree, the manpages aren't gzipped
284 # When running from a source tree, the manpages aren't gzipped
282 manpages = [f for f in glob(pjoin('docs','man','*.1')) if isfile(f)]
285 manpages = [f for f in glob(pjoin('docs','man','*.1')) if isfile(f)]
283
286
284 # And assemble the entire output list
287 # And assemble the entire output list
285 data_files = [ (manpagebase, manpages) ]
288 data_files = [ (manpagebase, manpages) ]
286
289
287 return data_files
290 return data_files
288
291
289
292
290 def make_man_update_target(manpage):
293 def make_man_update_target(manpage):
291 """Return a target_update-compliant tuple for the given manpage.
294 """Return a target_update-compliant tuple for the given manpage.
292
295
293 Parameters
296 Parameters
294 ----------
297 ----------
295 manpage : string
298 manpage : string
296 Name of the manpage, must include the section number (trailing number).
299 Name of the manpage, must include the section number (trailing number).
297
300
298 Example
301 Example
299 -------
302 -------
300
303
301 >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
304 >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
302 ('docs/man/ipython.1.gz',
305 ('docs/man/ipython.1.gz',
303 ['docs/man/ipython.1'],
306 ['docs/man/ipython.1'],
304 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
307 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
305 """
308 """
306 man_dir = pjoin('docs', 'man')
309 man_dir = pjoin('docs', 'man')
307 manpage_gz = manpage + '.gz'
310 manpage_gz = manpage + '.gz'
308 manpath = pjoin(man_dir, manpage)
311 manpath = pjoin(man_dir, manpage)
309 manpath_gz = pjoin(man_dir, manpage_gz)
312 manpath_gz = pjoin(man_dir, manpage_gz)
310 gz_cmd = ( "cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s" %
313 gz_cmd = ( "cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s" %
311 locals() )
314 locals() )
312 return (manpath_gz, [manpath], gz_cmd)
315 return (manpath_gz, [manpath], gz_cmd)
313
316
314 # The two functions below are copied from IPython.utils.path, so we don't need
317 # The two functions below are copied from IPython.utils.path, so we don't need
315 # to import IPython during setup, which fails on Python 3.
318 # to import IPython during setup, which fails on Python 3.
316
319
317 def target_outdated(target,deps):
320 def target_outdated(target,deps):
318 """Determine whether a target is out of date.
321 """Determine whether a target is out of date.
319
322
320 target_outdated(target,deps) -> 1/0
323 target_outdated(target,deps) -> 1/0
321
324
322 deps: list of filenames which MUST exist.
325 deps: list of filenames which MUST exist.
323 target: single filename which may or may not exist.
326 target: single filename which may or may not exist.
324
327
325 If target doesn't exist or is older than any file listed in deps, return
328 If target doesn't exist or is older than any file listed in deps, return
326 true, otherwise return false.
329 true, otherwise return false.
327 """
330 """
328 try:
331 try:
329 target_time = os.path.getmtime(target)
332 target_time = os.path.getmtime(target)
330 except os.error:
333 except os.error:
331 return 1
334 return 1
332 for dep in deps:
335 for dep in deps:
333 dep_time = os.path.getmtime(dep)
336 dep_time = os.path.getmtime(dep)
334 if dep_time > target_time:
337 if dep_time > target_time:
335 #print "For target",target,"Dep failed:",dep # dbg
338 #print "For target",target,"Dep failed:",dep # dbg
336 #print "times (dep,tar):",dep_time,target_time # dbg
339 #print "times (dep,tar):",dep_time,target_time # dbg
337 return 1
340 return 1
338 return 0
341 return 0
339
342
340
343
341 def target_update(target,deps,cmd):
344 def target_update(target,deps,cmd):
342 """Update a target with a given command given a list of dependencies.
345 """Update a target with a given command given a list of dependencies.
343
346
344 target_update(target,deps,cmd) -> runs cmd if target is outdated.
347 target_update(target,deps,cmd) -> runs cmd if target is outdated.
345
348
346 This is just a wrapper around target_outdated() which calls the given
349 This is just a wrapper around target_outdated() which calls the given
347 command if target is outdated."""
350 command if target is outdated."""
348
351
349 if target_outdated(target,deps):
352 if target_outdated(target,deps):
350 os.system(cmd)
353 os.system(cmd)
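# Editor's sketch (illustrative, not part of this changeset): these two
# helpers pair naturally with make_man_update_target() above to re-gzip a
# man page only when the source page is newer, e.g.:
#
#   target_update(*make_man_update_target('ipython.1'))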
351
354
352 #---------------------------------------------------------------------------
355 #---------------------------------------------------------------------------
353 # Find scripts
356 # Find scripts
354 #---------------------------------------------------------------------------
357 #---------------------------------------------------------------------------
355
358
356 def find_entry_points():
359 def find_entry_points():
357 """Find IPython's scripts.
360 """Find IPython's scripts.
358
361
359 Returns setuptools entry_point-style definitions of the form
362 Returns setuptools entry_point-style definitions of the form
360 "name = module:callable" for each IPython script.
363 "name = module:callable" for each IPython script.
361 
364 
362 Each script is listed twice: once under its plain name and once with
365 Each script is listed twice: once under its plain name and once with
363 the major Python version appended as a suffix, so that the Python 3
366 the major Python version appended as a suffix, so that the Python 3
364 scripts get named "ipython3" etc.
367 scripts get named "ipython3" etc.
365 
368 
366 """
369 """
367 ep = [
370 ep = [
368 'ipython%s = IPython:start_ipython',
371 'ipython%s = IPython:start_ipython',
369 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
372 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
370 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
373 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
371 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
374 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
372 'iptest%s = IPython.testing.iptestcontroller:main',
375 'iptest%s = IPython.testing.iptestcontroller:main',
373 ]
376 ]
374 suffix = str(sys.version_info[0])
377 suffix = str(sys.version_info[0])
375 return [e % '' for e in ep] + [e % suffix for e in ep]
378 return [e % '' for e in ep] + [e % suffix for e in ep]
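# Editor's sketch (illustrative, not part of this changeset): each string
# returned above follows the setuptools console_scripts form
# "name = module:callable", which build_scripts_entrypt below splits apart.
_name, _entrypt = (s.strip() for s in 'ipython = IPython:start_ipython'.split('='))
_mod, _func = _entrypt.split(':')
assert (_name, _mod, _func) == ('ipython', 'IPython', 'start_ipython')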
376
379
377 script_src = """#!{executable}
380 script_src = """#!{executable}
378 # This script was automatically generated by setup.py
381 # This script was automatically generated by setup.py
379 if __name__ == '__main__':
382 if __name__ == '__main__':
380 from {mod} import {func}
383 from {mod} import {func}
381 {func}()
384 {func}()
382 """
385 """
383
386
384 class build_scripts_entrypt(build_scripts):
387 class build_scripts_entrypt(build_scripts):
385 def run(self):
388 def run(self):
386 self.mkpath(self.build_dir)
389 self.mkpath(self.build_dir)
387 outfiles = []
390 outfiles = []
388 for script in find_entry_points():
391 for script in find_entry_points():
389 name, entrypt = script.split('=')
392 name, entrypt = script.split('=')
390 name = name.strip()
393 name = name.strip()
391 entrypt = entrypt.strip()
394 entrypt = entrypt.strip()
392 outfile = os.path.join(self.build_dir, name)
395 outfile = os.path.join(self.build_dir, name)
393 outfiles.append(outfile)
396 outfiles.append(outfile)
394 print('Writing script to', outfile)
397 print('Writing script to', outfile)
395
398
396 mod, func = entrypt.split(':')
399 mod, func = entrypt.split(':')
397 with open(outfile, 'w') as f:
400 with open(outfile, 'w') as f:
398 f.write(script_src.format(executable=sys.executable,
401 f.write(script_src.format(executable=sys.executable,
399 mod=mod, func=func))
402 mod=mod, func=func))
400
403
401 return outfiles, outfiles
404 return outfiles, outfiles
402
405
403 class install_lib_symlink(Command):
406 class install_lib_symlink(Command):
404 user_options = [
407 user_options = [
405 ('install-dir=', 'd', "directory to install to"),
408 ('install-dir=', 'd', "directory to install to"),
406 ]
409 ]
407
410
408 def initialize_options(self):
411 def initialize_options(self):
409 self.install_dir = None
412 self.install_dir = None
410
413
411 def finalize_options(self):
414 def finalize_options(self):
412 self.set_undefined_options('symlink',
415 self.set_undefined_options('symlink',
413 ('install_lib', 'install_dir'),
416 ('install_lib', 'install_dir'),
414 )
417 )
415
418
416 def run(self):
419 def run(self):
417 if sys.platform == 'win32':
420 if sys.platform == 'win32':
418 raise Exception("This doesn't work on Windows.")
421 raise Exception("This doesn't work on Windows.")
419 pkg = os.path.join(os.getcwd(), 'IPython')
422 pkg = os.path.join(os.getcwd(), 'IPython')
420 dest = os.path.join(self.install_dir, 'IPython')
423 dest = os.path.join(self.install_dir, 'IPython')
421 if os.path.islink(dest):
424 if os.path.islink(dest):
422 print('removing existing symlink at %s' % dest)
425 print('removing existing symlink at %s' % dest)
423 os.unlink(dest)
426 os.unlink(dest)
424 print('symlinking %s -> %s' % (pkg, dest))
427 print('symlinking %s -> %s' % (pkg, dest))
425 os.symlink(pkg, dest)
428 os.symlink(pkg, dest)
426
429
427 class unsymlink(install):
430 class unsymlink(install):
428 def run(self):
431 def run(self):
429 dest = os.path.join(self.install_lib, 'IPython')
432 dest = os.path.join(self.install_lib, 'IPython')
430 if os.path.islink(dest):
433 if os.path.islink(dest):
431 print('removing symlink at %s' % dest)
434 print('removing symlink at %s' % dest)
432 os.unlink(dest)
435 os.unlink(dest)
433 else:
436 else:
434 print('No symlink exists at %s' % dest)
437 print('No symlink exists at %s' % dest)
435
438
436 class install_symlinked(install):
439 class install_symlinked(install):
437 def run(self):
440 def run(self):
438 if sys.platform == 'win32':
441 if sys.platform == 'win32':
439 raise Exception("This doesn't work on Windows.")
442 raise Exception("This doesn't work on Windows.")
440
443
441 # Run all sub-commands (at least those that need to be run)
444 # Run all sub-commands (at least those that need to be run)
442 for cmd_name in self.get_sub_commands():
445 for cmd_name in self.get_sub_commands():
443 self.run_command(cmd_name)
446 self.run_command(cmd_name)
444
447
445 # 'sub_commands': a list of commands this command might have to run to
448 # 'sub_commands': a list of commands this command might have to run to
446 # get its work done. See cmd.py for more info.
449 # get its work done. See cmd.py for more info.
447 sub_commands = [('install_lib_symlink', lambda self:True),
450 sub_commands = [('install_lib_symlink', lambda self:True),
448 ('install_scripts_sym', lambda self:True),
451 ('install_scripts_sym', lambda self:True),
449 ]
452 ]
450
453
451 class install_scripts_for_symlink(install_scripts):
454 class install_scripts_for_symlink(install_scripts):
452 """Redefined to get options from 'symlink' instead of 'install'.
455 """Redefined to get options from 'symlink' instead of 'install'.
453
456
454 I love distutils almost as much as I love setuptools.
457 I love distutils almost as much as I love setuptools.
455 """
458 """
456 def finalize_options(self):
459 def finalize_options(self):
457 self.set_undefined_options('build', ('build_scripts', 'build_dir'))
460 self.set_undefined_options('build', ('build_scripts', 'build_dir'))
458 self.set_undefined_options('symlink',
461 self.set_undefined_options('symlink',
459 ('install_scripts', 'install_dir'),
462 ('install_scripts', 'install_dir'),
460 ('force', 'force'),
463 ('force', 'force'),
461 ('skip_build', 'skip_build'),
464 ('skip_build', 'skip_build'),
462 )
465 )
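# Editor's note (illustrative, not part of this changeset): together these
# classes appear to back a `python setup.py symlink` development install
# (undone with `unsymlink`), which links the source tree's IPython package
# and generated scripts into place instead of copying them.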
463
466
464 #---------------------------------------------------------------------------
467 #---------------------------------------------------------------------------
465 # Verify all dependencies
468 # Verify all dependencies
466 #---------------------------------------------------------------------------
469 #---------------------------------------------------------------------------
467
470
468 def check_for_dependencies():
471 def check_for_dependencies():
469 """Check for IPython's dependencies.
472 """Check for IPython's dependencies.
470
473
471 This function should NOT be called if running under setuptools!
474 This function should NOT be called if running under setuptools!
472 """
475 """
473 from setupext.setupext import (
476 from setupext.setupext import (
474 print_line, print_raw, print_status,
477 print_line, print_raw, print_status,
475 check_for_sphinx, check_for_pygments,
478 check_for_sphinx, check_for_pygments,
476 check_for_nose, check_for_pexpect,
479 check_for_nose, check_for_pexpect,
477 check_for_pyzmq, check_for_readline,
480 check_for_pyzmq, check_for_readline,
478 check_for_jinja2, check_for_tornado
481 check_for_jinja2, check_for_tornado
479 )
482 )
480 print_line()
483 print_line()
481 print_raw("BUILDING IPYTHON")
484 print_raw("BUILDING IPYTHON")
482 print_status('python', sys.version)
485 print_status('python', sys.version)
483 print_status('platform', sys.platform)
486 print_status('platform', sys.platform)
484 if sys.platform == 'win32':
487 if sys.platform == 'win32':
485 print_status('Windows version', sys.getwindowsversion())
488 print_status('Windows version', sys.getwindowsversion())
486
489
487 print_raw("")
490 print_raw("")
488 print_raw("OPTIONAL DEPENDENCIES")
491 print_raw("OPTIONAL DEPENDENCIES")
489
492
490 check_for_sphinx()
493 check_for_sphinx()
491 check_for_pygments()
494 check_for_pygments()
492 check_for_nose()
495 check_for_nose()
493 if os.name == 'posix':
496 if os.name == 'posix':
494 check_for_pexpect()
497 check_for_pexpect()
495 check_for_pyzmq()
498 check_for_pyzmq()
496 check_for_tornado()
499 check_for_tornado()
497 check_for_readline()
500 check_for_readline()
498 check_for_jinja2()
501 check_for_jinja2()
499
502
500 #---------------------------------------------------------------------------
503 #---------------------------------------------------------------------------
501 # VCS related
504 # VCS related
502 #---------------------------------------------------------------------------
505 #---------------------------------------------------------------------------
503
506
504 # utils.submodule has checks for submodule status
507 # utils.submodule has checks for submodule status
505 execfile(pjoin('IPython','utils','submodule.py'), globals())
508 execfile(pjoin('IPython','utils','submodule.py'), globals())
506
509
507 class UpdateSubmodules(Command):
510 class UpdateSubmodules(Command):
508 """Update git submodules
511 """Update git submodules
509
512
510 IPython's external javascript dependencies live in a separate repo.
513 IPython's external javascript dependencies live in a separate repo.
511 """
514 """
512 description = "Update git submodules"
515 description = "Update git submodules"
513 user_options = []
516 user_options = []
514
517
515 def initialize_options(self):
518 def initialize_options(self):
516 pass
519 pass
517
520
518 def finalize_options(self):
521 def finalize_options(self):
519 pass
522 pass
520
523
521 def run(self):
524 def run(self):
522 failure = False
525 failure = False
523 try:
526 try:
524 self.spawn('git submodule init'.split())
527 self.spawn('git submodule init'.split())
525 self.spawn('git submodule update --recursive'.split())
528 self.spawn('git submodule update --recursive'.split())
526 except Exception as e:
529 except Exception as e:
527 failure = e
530 failure = e
528 print(e)
531 print(e)
529
532
530 if not check_submodule_status(repo_root) == 'clean':
533 if not check_submodule_status(repo_root) == 'clean':
531 print("submodules could not be checked out")
534 print("submodules could not be checked out")
532 sys.exit(1)
535 sys.exit(1)
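# Editor's note (illustrative, not part of this changeset): this is the
# command behind `python setup.py submodule`, which require_submodules()
# below tells users to run when the submodule check fails.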
533
536
534
537
535 def git_prebuild(pkg_dir, build_cmd=build_py):
538 def git_prebuild(pkg_dir, build_cmd=build_py):
536 """Return extended build or sdist command class for recording commit
539 """Return extended build or sdist command class for recording commit
537
540
538 records git commit in IPython.utils._sysinfo.commit
541 records git commit in IPython.utils._sysinfo.commit
539
542
540 for use in IPython.utils.sysinfo.sys_info() calls after installation.
543 for use in IPython.utils.sysinfo.sys_info() calls after installation.
541
544
542 Also ensures that submodules exist prior to running
545 Also ensures that submodules exist prior to running
543 """
546 """
544
547
545 class MyBuildPy(build_cmd):
548 class MyBuildPy(build_cmd):
546 ''' Subclass to write commit data into installation tree '''
549 ''' Subclass to write commit data into installation tree '''
547 def run(self):
550 def run(self):
548 build_cmd.run(self)
551 build_cmd.run(self)
549 # this one will only fire for build commands
552 # this one will only fire for build commands
550 if hasattr(self, 'build_lib'):
553 if hasattr(self, 'build_lib'):
551 self._record_commit(self.build_lib)
554 self._record_commit(self.build_lib)
552
555
553 def make_release_tree(self, base_dir, files):
556 def make_release_tree(self, base_dir, files):
554 # this one will fire for sdist
557 # this one will fire for sdist
555 build_cmd.make_release_tree(self, base_dir, files)
558 build_cmd.make_release_tree(self, base_dir, files)
556 self._record_commit(base_dir)
559 self._record_commit(base_dir)
557
560
558 def _record_commit(self, base_dir):
561 def _record_commit(self, base_dir):
559 import subprocess
562 import subprocess
560 proc = subprocess.Popen('git rev-parse --short HEAD',
563 proc = subprocess.Popen('git rev-parse --short HEAD',
561 stdout=subprocess.PIPE,
564 stdout=subprocess.PIPE,
562 stderr=subprocess.PIPE,
565 stderr=subprocess.PIPE,
563 shell=True)
566 shell=True)
564 repo_commit, _ = proc.communicate()
567 repo_commit, _ = proc.communicate()
565 repo_commit = repo_commit.strip().decode("ascii")
568 repo_commit = repo_commit.strip().decode("ascii")
566
569
567 out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
570 out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
568 if os.path.isfile(out_pth) and not repo_commit:
571 if os.path.isfile(out_pth) and not repo_commit:
569 # nothing to write, don't clobber
572 # nothing to write, don't clobber
570 return
573 return
571
574
572 print("writing git commit '%s' to %s" % (repo_commit, out_pth))
575 print("writing git commit '%s' to %s" % (repo_commit, out_pth))
573
576
574 # remove to avoid overwriting original via hard link
577 # remove to avoid overwriting original via hard link
575 try:
578 try:
576 os.remove(out_pth)
579 os.remove(out_pth)
577 except (IOError, OSError):
580 except (IOError, OSError):
578 pass
581 pass
579 with open(out_pth, 'w') as out_file:
582 with open(out_pth, 'w') as out_file:
580 out_file.writelines([
583 out_file.writelines([
581 '# GENERATED BY setup.py\n',
584 '# GENERATED BY setup.py\n',
582 'commit = u"%s"\n' % repo_commit,
585 'commit = u"%s"\n' % repo_commit,
583 ])
586 ])
584 return require_submodules(MyBuildPy)
587 return require_submodules(MyBuildPy)
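# Editor's sketch (illustrative, not part of this changeset): after a build,
# the generated IPython/utils/_sysinfo.py in the build tree contains just the
# recorded commit, e.g. (hash hypothetical):
#
#   # GENERATED BY setup.py
#   commit = u"1a2b3c4"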
585
588
586
589
587 def require_submodules(command):
590 def require_submodules(command):
588 """decorator for instructing a command to check for submodules before running"""
591 """decorator for instructing a command to check for submodules before running"""
589 class DecoratedCommand(command):
592 class DecoratedCommand(command):
590 def run(self):
593 def run(self):
591 if not check_submodule_status(repo_root) == 'clean':
594 if not check_submodule_status(repo_root) == 'clean':
592 print("submodules missing! Run `setup.py submodule` and try again")
595 print("submodules missing! Run `setup.py submodule` and try again")
593 sys.exit(1)
596 sys.exit(1)
594 command.run(self)
597 command.run(self)
595 return DecoratedCommand
598 return DecoratedCommand
596
599
597 #---------------------------------------------------------------------------
600 #---------------------------------------------------------------------------
598 # bdist related
601 # bdist related
599 #---------------------------------------------------------------------------
602 #---------------------------------------------------------------------------
600
603
601 def get_bdist_wheel():
604 def get_bdist_wheel():
602 """Construct bdist_wheel command for building wheels
605 """Construct bdist_wheel command for building wheels
603
606
604 Constructs py2-none-any tag, instead of py2.7-none-any
607 Constructs py2-none-any tag, instead of py2.7-none-any
605 """
608 """
606 class RequiresWheel(Command):
609 class RequiresWheel(Command):
607 description = "Dummy command for missing bdist_wheel"
610 description = "Dummy command for missing bdist_wheel"
608 user_options = []
611 user_options = []
609
612
610 def initialize_options(self):
613 def initialize_options(self):
611 pass
614 pass
612
615
613 def finalize_options(self):
616 def finalize_options(self):
614 pass
617 pass
615
618
616 def run(self):
619 def run(self):
617 print("bdist_wheel requires the wheel package")
620 print("bdist_wheel requires the wheel package")
618 sys.exit(1)
621 sys.exit(1)
619
622
620 if 'setuptools' not in sys.modules:
623 if 'setuptools' not in sys.modules:
621 return RequiresWheel
624 return RequiresWheel
622 else:
625 else:
623 try:
626 try:
624 from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
627 from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
625 except ImportError:
628 except ImportError:
626 return RequiresWheel
629 return RequiresWheel
627
630
628 class bdist_wheel_tag(bdist_wheel):
631 class bdist_wheel_tag(bdist_wheel):
629
632
630 def add_requirements(self, metadata_path):
633 def add_requirements(self, metadata_path):
631 """transform platform-dependent requirements"""
634 """transform platform-dependent requirements"""
632 pkg_info = read_pkg_info(metadata_path)
635 pkg_info = read_pkg_info(metadata_path)
633 # pkg_info is an email.Message object (?!)
636 # pkg_info is an email.Message object (?!)
634 # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
637 # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
635 # and transform them to conditionals
638 # and transform them to conditionals
636 requires = pkg_info.get_all('Requires-Dist')
639 requires = pkg_info.get_all('Requires-Dist')
637 del pkg_info['Requires-Dist']
640 del pkg_info['Requires-Dist']
638 def _remove_startswith(lis, prefix):
641 def _remove_startswith(lis, prefix):
639 """like list.remove, but with startswith instead of =="""
642 """like list.remove, but with startswith instead of =="""
640 found = False
643 found = False
641 for idx, item in enumerate(lis):
644 for idx, item in enumerate(lis):
642 if item.startswith(prefix):
645 if item.startswith(prefix):
643 found = True
646 found = True
644 break
647 break
645 if found:
648 if found:
646 lis.pop(idx)
649 lis.pop(idx)
647
650
648 for pkg in ("gnureadline", "pyreadline", "mock"):
651 for pkg in ("gnureadline", "pyreadline", "mock"):
649 _remove_startswith(requires, pkg)
652 _remove_startswith(requires, pkg)
650 requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
653 requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
651 requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
654 requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
652 requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
655 requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
653 requires.append("mock; extra == 'test' and python_version < '3.3'")
656 requires.append("mock; extra == 'test' and python_version < '3.3'")
654 for r in requires:
657 for r in requires:
655 pkg_info['Requires-Dist'] = r
658 pkg_info['Requires-Dist'] = r
656 write_pkg_info(metadata_path, pkg_info)
659 write_pkg_info(metadata_path, pkg_info)
657
660
658 return bdist_wheel_tag
661 return bdist_wheel_tag
659
662
660 #---------------------------------------------------------------------------
663 #---------------------------------------------------------------------------
661 # Notebook related
664 # Notebook related
662 #---------------------------------------------------------------------------
665 #---------------------------------------------------------------------------
663
666
664 class CompileCSS(Command):
667 class CompileCSS(Command):
665 """Recompile Notebook CSS
668 """Recompile Notebook CSS
666
669
667 Regenerate the compiled CSS from LESS sources.
670 Regenerate the compiled CSS from LESS sources.
668
671
669 Requires various dev dependencies, such as fabric and lessc.
672 Requires various dev dependencies, such as fabric and lessc.
670 """
673 """
671 description = "Recompile Notebook CSS"
674 description = "Recompile Notebook CSS"
672 user_options = [
675 user_options = [
673 ('minify', 'x', "minify CSS"),
676 ('minify', 'x', "minify CSS"),
674 ('force', 'f', "force recompilation of CSS"),
677 ('force', 'f', "force recompilation of CSS"),
675 ]
678 ]
676
679
677 def initialize_options(self):
680 def initialize_options(self):
678 self.minify = False
681 self.minify = False
679 self.force = False
682 self.force = False
680
683
681 def finalize_options(self):
684 def finalize_options(self):
682 self.minify = bool(self.minify)
685 self.minify = bool(self.minify)
683 self.force = bool(self.force)
686 self.force = bool(self.force)
684
687
685 def run(self):
688 def run(self):
686 check_call([
689 check_call([
687 "fab",
690 "fab",
688 "css:minify=%s,force=%s" % (self.minify, self.force),
691 "css:minify=%s,force=%s" % (self.minify, self.force),
689 ], cwd=pjoin(repo_root, "IPython", "html"),
692 ], cwd=pjoin(repo_root, "IPython", "html"),
690 )
693 )
691
694
692
695
693 class JavascriptVersion(Command):
696 class JavascriptVersion(Command):
694 """write the javascript version to notebook javascript"""
697 """write the javascript version to notebook javascript"""
695 description = "Write IPython version to javascript"
698 description = "Write IPython version to javascript"
696 user_options = []
699 user_options = []
697
700
698 def initialize_options(self):
701 def initialize_options(self):
699 pass
702 pass
700
703
701 def finalize_options(self):
704 def finalize_options(self):
702 pass
705 pass
703
706
704 def run(self):
707 def run(self):
705 nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js")
708 nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js")
706 with open(nsfile) as f:
709 with open(nsfile) as f:
707 lines = f.readlines()
710 lines = f.readlines()
708 with open(nsfile, 'w') as f:
711 with open(nsfile, 'w') as f:
709 for line in lines:
712 for line in lines:
710 if line.startswith("IPython.version"):
713 if line.startswith("IPython.version"):
711 line = 'IPython.version = "{0}";\n'.format(version)
714 line = 'IPython.version = "{0}";\n'.format(version)
712 f.write(line)
715 f.write(line)
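# Editor's sketch (illustrative, not part of this changeset): running
# `python setup.py jsversion` rewrites the matching line in namespace.js to
# something like (version string hypothetical):
#
#   IPython.version = "3.0.0-dev";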
713
716
714
717
715 def css_js_prerelease(command, strict=True):
718 def css_js_prerelease(command, strict=True):
716 """decorator for building js/minified css prior to a release"""
719 """decorator for building js/minified css prior to a release"""
717 class DecoratedCommand(command):
720 class DecoratedCommand(command):
718 def run(self):
721 def run(self):
719 self.distribution.run_command('jsversion')
722 self.distribution.run_command('jsversion')
720 css = self.distribution.get_command_obj('css')
723 css = self.distribution.get_command_obj('css')
721 css.minify = True
724 css.minify = True
722 try:
725 try:
723 self.distribution.run_command('css')
726 self.distribution.run_command('css')
724 except Exception as e:
727 except Exception as e:
725 if strict:
728 if strict:
726 raise
729 raise
727 else:
730 else:
728 log.warn("Failed to build css sourcemaps: %s" % e)
731 log.warn("Failed to build css sourcemaps: %s" % e)
729 command.run(self)
732 command.run(self)
730 return DecoratedCommand
733 return DecoratedCommand
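# Editor's sketch (illustrative, not part of this changeset): this decorator
# is applied to release-oriented commands in setup.py, as seen at the top of
# this changeset:
#
#   setup_args['cmdclass']['bdist_wheel'] = css_js_prerelease(get_bdist_wheel())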