Use Draft4 JSON Schema for both v3 and v4...
MinRK
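The change replaces the list-returning validate() with one that raises jsonschema.ValidationError, and isvalid() now wraps the same check and returns a plain bool. A minimal sketch of the new calling convention, assuming nb is a notebook dict or NotebookNode already in v3 format:

    from IPython.nbformat.validator import validate, isvalid
    from jsonschema import ValidationError

    try:
        validate(nb)                      # raises on the first schema violation
    except ValidationError as e:
        print("Notebook JSON is invalid:", e)

    if not isvalid(nb):                   # same check, returned as a bool
        print("notebook failed schema validation")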
@@ -0,0 +1,363 @@ IPython/nbformat/v3/nbformat.v3.schema.json (new file)
1 {
2 "$schema": "http://json-schema.org/draft-04/schema#",
3 "description": "IPython Notebook v3.0 JSON schema.",
4 "type": "object",
5 "additionalProperties": false,
6 "required": ["metadata", "nbformat_minor", "nbformat", "worksheets"],
7 "properties": {
8 "metadata": {
9 "description": "Notebook root-level metadata.",
10 "type": "object",
11 "additionalProperties": true,
12 "properties": {
13 "kernel_info": {
14 "description": "Kernel information.",
15 "type": "object",
16 "required": ["name", "language"],
17 "properties": {
18 "name": {
19 "description": "Name of the kernel specification.",
20 "type": "string"
21 },
22 "language": {
23 "description": "The programming language which this kernel runs.",
24 "type": "string"
25 },
26 "codemirror_mode": {
27 "description": "The codemirror mode to use for code in this language.",
28 "type": "string"
29 }
30 }
31 },
32 "signature": {
33 "description": "Hash of the notebook.",
34 "type": "string"
35 }
36 }
37 },
38 "nbformat_minor": {
39 "description": "Notebook format (minor number). Incremented for backward compatible changes to the notebook format.",
40 "type": "integer",
41 "minimum": 0
42 },
43 "nbformat": {
44 "description": "Notebook format (major number). Incremented between backwards incompatible changes to the notebook format.",
45 "type": "integer",
46 "minimum": 3,
47 "maximum": 3
48 },
49 "orig_nbformat": {
50 "description": "Original notebook format (major number) before converting the notebook between versions.",
51 "type": "integer",
52 "minimum": 1
53 },
54 "worksheets" : {
55 "description": "Array of worksheets",
56 "type": "array",
57 "items": {"$ref": "#/definitions/worksheet"}
58 }
59 },
60
61 "definitions": {
62 "worksheet": {
63 "additionalProperties": false,
64 "required" : ["cells"],
65 "properties":{
66 "cells": {
67 "description": "Array of cells of the current notebook.",
68 "type": "array",
69 "items": {
70 "type": "object",
71 "oneOf": [
72 {"$ref": "#/definitions/raw_cell"},
73 {"$ref": "#/definitions/markdown_cell"},
74 {"$ref": "#/definitions/heading_cell"},
75 {"$ref": "#/definitions/code_cell"}
76 ]
77 }
78 },
79 "metadata": {
80 "type": "object",
81 "description": "metadata of the current worksheet"
82 }
83 }
84 },
85 "raw_cell": {
86 "description": "Notebook raw nbconvert cell.",
87 "type": "object",
88 "additionalProperties": false,
89 "required": ["cell_type", "source"],
90 "properties": {
91 "cell_type": {
92 "description": "String identifying the type of cell.",
93 "enum": ["raw"]
94 },
95 "metadata": {
96 "description": "Cell-level metadata.",
97 "type": "object",
98 "additionalProperties": true,
99 "properties": {
100 "format": {
101 "description": "Raw cell metadata format for nbconvert.",
102 "type": "string"
103 },
104 "name": {"$ref": "#/definitions/misc/metadata_name"},
105 "tags": {"$ref": "#/definitions/misc/metadata_tags"}
106 }
107 },
108 "source": {"$ref": "#/definitions/misc/source"}
109 }
110 },
111
112 "markdown_cell": {
113 "description": "Notebook markdown cell.",
114 "type": "object",
115 "additionalProperties": false,
116 "required": ["cell_type", "source"],
117 "properties": {
118 "cell_type": {
119 "description": "String identifying the type of cell.",
120 "enum": ["markdown"]
121 },
122 "metadata": {
123 "description": "Cell-level metadata.",
124 "type": "object",
125 "properties": {
126 "name": {"$ref": "#/definitions/misc/metadata_name"},
127 "tags": {"$ref": "#/definitions/misc/metadata_tags"}
128 },
129 "additionalProperties": true
130 },
131 "source": {"$ref": "#/definitions/misc/source"}
132 }
133 },
134
135 "heading_cell": {
136 "description": "Notebook heading cell.",
137 "type": "object",
138 "additionalProperties": false,
139 "required": ["cell_type", "source", "level"],
140 "properties": {
141 "cell_type": {
142 "description": "String identifying the type of cell.",
143 "enum": ["heading"]
144 },
145 "metadata": {
146 "description": "Cell-level metadata.",
147 "type": "object",
148 "additionalProperties": true
149 },
150 "source": {"$ref": "#/definitions/misc/source"},
151 "level": {
152 "description": "Level of heading cells.",
153 "type": "integer",
154 "minimum": 1
155 }
156 }
157 },
158
159 "code_cell": {
160 "description": "Notebook code cell.",
161 "type": "object",
162 "additionalProperties": false,
163 "required": ["cell_type", "input", "outputs", "collapsed", "language"],
164 "properties": {
165 "cell_type": {
166 "description": "String identifying the type of cell.",
167 "enum": ["code"]
168 },
169 "language": {
170 "description": "The cell's language (always Python)",
171 "type": "string"
172 },
173 "collapsed": {
174 "description": "Whether the cell is collapsed/expanded.",
175 "type": "boolean"
176 },
177 "metadata": {
178 "description": "Cell-level metadata.",
179 "type": "object",
180 "additionalProperties": true
181 },
182 "input": {"$ref": "#/definitions/misc/source"},
183 "outputs": {
184 "description": "Execution, display, or stream outputs.",
185 "type": "array",
186 "items": {"$ref": "#/definitions/output"}
187 },
188 "prompt_number": {
189 "description": "The code cell's prompt number. Will be null if the cell has not been run.",
190 "type": ["integer", "null"],
191 "minimum": 0
192 }
193 }
194 },
195 "output": {
196 "type": "object",
197 "oneOf": [
198 {"$ref": "#/definitions/pyout"},
199 {"$ref": "#/definitions/display_data"},
200 {"$ref": "#/definitions/stream"},
201 {"$ref": "#/definitions/pyerr"}
202 ]
203 },
204 "pyout": {
205 "description": "Result of executing a code cell.",
206 "type": "object",
207 "additionalProperties": false,
208 "required": ["output_type", "prompt_number"],
209 "properties": {
210 "output_type": {
211 "description": "Type of cell output.",
212 "enum": ["pyout"]
213 },
214 "prompt_number": {
215 "description": "A result's prompt number.",
216 "type": ["integer"],
217 "minimum": 0
218 },
219 "text": {"$ref": "#/definitions/misc/multiline_string"},
220 "latex": {"$ref": "#/definitions/misc/multiline_string"},
221 "png": {"$ref": "#/definitions/misc/multiline_string"},
222 "jpeg": {"$ref": "#/definitions/misc/multiline_string"},
223 "svg": {"$ref": "#/definitions/misc/multiline_string"},
224 "html": {"$ref": "#/definitions/misc/multiline_string"},
225 "javascript": {"$ref": "#/definitions/misc/multiline_string"},
226 "json": {"$ref": "#/definitions/misc/multiline_string"},
227 "pdf": {"$ref": "#/definitions/misc/multiline_string"},
228 "metadata": {"$ref": "#/definitions/misc/output_metadata"}
229 },
230 "patternProperties": {
231 "^[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
232 "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
233 "$ref": "#/definitions/misc/multiline_string"
234 }
235 }
236 },
237
238 "display_data": {
239 "description": "Data displayed as a result of code cell execution.",
240 "type": "object",
241 "additionalProperties": false,
242 "required": ["output_type"],
243 "properties": {
244 "output_type": {
245 "description": "Type of cell output.",
246 "enum": ["display_data"]
247 },
248 "text": {"$ref": "#/definitions/misc/multiline_string"},
249 "latex": {"$ref": "#/definitions/misc/multiline_string"},
250 "png": {"$ref": "#/definitions/misc/multiline_string"},
251 "jpeg": {"$ref": "#/definitions/misc/multiline_string"},
252 "svg": {"$ref": "#/definitions/misc/multiline_string"},
253 "html": {"$ref": "#/definitions/misc/multiline_string"},
254 "javascript": {"$ref": "#/definitions/misc/multiline_string"},
255 "json": {"$ref": "#/definitions/misc/multiline_string"},
256 "pdf": {"$ref": "#/definitions/misc/multiline_string"},
257 "metadata": {"$ref": "#/definitions/misc/output_metadata"}
258 },
259 "patternProperties": {
260 "[a-zA-Z0-9]+/[a-zA-Z0-9\\-\\+\\.]+$": {
261 "description": "mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
262 "$ref": "#/definitions/misc/multiline_string"
263 }
264 }
265 },
266
267 "stream": {
268 "description": "Stream output from a code cell.",
269 "type": "object",
270 "additionalProperties": false,
271 "required": ["output_type", "stream", "text"],
272 "properties": {
273 "output_type": {
274 "description": "Type of cell output.",
275 "enum": ["stream"]
276 },
277 "stream": {
278 "description": "The stream type/destination.",
279 "type": "string"
280 },
281 "text": {
282 "description": "The stream's text output, represented as an array of strings.",
283 "$ref": "#/definitions/misc/multiline_string"
284 }
285 }
286 },
287
288 "pyerr": {
289 "description": "Output of an error that occurred during code cell execution.",
290 "type": "object",
291 "additionalProperties": false,
292 "required": ["output_type", "ename", "evalue", "traceback"],
293 "properties": {
294 "output_type": {
295 "description": "Type of cell output.",
296 "enum": ["pyerr"]
297 },
298 "metadata": {"$ref": "#/definitions/misc/output_metadata"},
299 "ename": {
300 "description": "The name of the error.",
301 "type": "string"
302 },
303 "evalue": {
304 "description": "The value, or message, of the error.",
305 "type": "string"
306 },
307 "traceback": {
308 "description": "The error's traceback, represented as an array of strings.",
309 "type": "array",
310 "items": {"type": "string"}
311 }
312 }
313 },
314
315 "misc": {
316 "metadata_name": {
317 "description": "The cell's name. If present, must be a non-empty string.",
318 "type": "string",
319 "pattern": "^.+$"
320 },
321 "metadata_tags": {
322 "description": "The cell's tags. Tags must be unique, and must not contain commas.",
323 "type": "array",
324 "uniqueItems": true,
325 "items": {
326 "type": "string",
327 "pattern": "^[^,]+$"
328 }
329 },
330 "source": {
331 "description": "Contents of the cell, represented as an array of lines.",
332 "$ref": "#/definitions/misc/multiline_string"
333 },
334 "prompt_number": {
335 "description": "The code cell's prompt number. Will be null if the cell has not been run.",
336 "type": ["integer", "null"],
337 "minimum": 0
338 },
339 "mimetype": {
340 "patternProperties": {
341 "^[a-zA-Z0-9\\-\\+]+/[a-zA-Z0-9\\-\\+]+": {
342 "description": "The cell's mimetype output (e.g. text/plain), represented as either an array of strings or a string.",
343 "$ref": "#/definitions/misc/multiline_string"
344 }
345 }
346 },
347 "output_metadata": {
348 "description": "Cell output metadata.",
349 "type": "object",
350 "additionalProperties": true
351 },
352 "multiline_string": {
353 "oneOf" : [
354 {"type": "string"},
355 {
356 "type": "array",
357 "items": {"type": "string"}
358 }
359 ]
360 }
361 }
362 }
363 }
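For orientation, the smallest document this draft-4 schema accepts is a notebook with empty metadata and a single empty worksheet. A sketch of checking such a document directly with jsonschema; the schema location is an assumption based on the nbformat_schema value set in nbbase.py below:

    import json
    import os

    from jsonschema import Draft4Validator
    import IPython.nbformat.v3 as v3

    # assumed path: the schema file ships next to the v3 package
    schema_path = os.path.join(os.path.dirname(v3.__file__), "nbformat.v3.schema.json")
    with open(schema_path) as f:
        schema = json.load(f)

    minimal_nb = {
        "metadata": {},
        "nbformat": 3,
        "nbformat_minor": 0,
        "worksheets": [{"cells": []}],
    }

    Draft4Validator(schema).validate(minimal_nb)  # raises ValidationError if invalid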
@@ -1,212 +1,213 @@ IPython/nbformat/current.py
1 1 """The official API for working with notebooks in the current format version."""
2 2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
3 6 from __future__ import print_function
4 7
5 8 import re
6 9
7 10 from IPython.utils.py3compat import unicode_type
8 11
9 12 from IPython.nbformat.v3 import (
10 13 NotebookNode,
11 14 new_code_cell, new_text_cell, new_notebook, new_output, new_worksheet,
12 15 parse_filename, new_metadata, new_author, new_heading_cell, nbformat,
13 16 nbformat_minor, nbformat_schema, to_notebook_json
14 17 )
15 18 from IPython.nbformat import v3 as _v_latest
16 19
17 20 from .reader import reads as reader_reads
18 21 from .reader import versions
19 22 from .convert import convert
20 from .validator import validate
23 from .validator import validate, ValidationError
21 24
22 25 from IPython.utils.log import get_logger
23 26
24 27 __all__ = ['NotebookNode', 'new_code_cell', 'new_text_cell', 'new_notebook',
25 28 'new_output', 'new_worksheet', 'parse_filename', 'new_metadata', 'new_author',
26 29 'new_heading_cell', 'nbformat', 'nbformat_minor', 'nbformat_schema',
27 30 'to_notebook_json', 'convert', 'validate', 'NBFormatError', 'parse_py',
28 31 'reads_json', 'writes_json', 'reads_py', 'writes_py', 'reads', 'writes', 'read',
29 32 'write']
30 33
31 34 current_nbformat = nbformat
32 35 current_nbformat_minor = nbformat_minor
33 36 current_nbformat_module = _v_latest.__name__
34 37
35 38
36 39 class NBFormatError(ValueError):
37 40 pass
38 41
39 42
40 43 def parse_py(s, **kwargs):
41 44 """Parse a string into a (nbformat, string) tuple."""
42 45 nbf = current_nbformat
43 46 nbm = current_nbformat_minor
44 47
45 48 pattern = r'# <nbformat>(?P<nbformat>\d+[\.\d+]*)</nbformat>'
46 49 m = re.search(pattern,s)
47 50 if m is not None:
48 51 digits = m.group('nbformat').split('.')
49 52 nbf = int(digits[0])
50 53 if len(digits) > 1:
51 54 nbm = int(digits[1])
52 55
53 56 return nbf, nbm, s
54 57
55 58
56 59 def reads_json(nbjson, **kwargs):
57 60 """Read a JSON notebook from a string and return the NotebookNode
58 61 object. Report if any JSON format errors are detected.
59 62
60 63 """
61 64 nb = reader_reads(nbjson, **kwargs)
62 65 nb_current = convert(nb, current_nbformat)
63 errors = validate(nb_current)
64 if errors:
65 get_logger().error(
66 "Notebook JSON is invalid (%d errors detected during read)",
67 len(errors))
66 try:
67 validate(nb_current)
68 except ValidationError as e:
69 get_logger().error("Notebook JSON is invalid: %s", e)
68 70 return nb_current
69 71
70 72
71 73 def writes_json(nb, **kwargs):
72 74 """Take a NotebookNode object and write out a JSON string. Report if
73 75 any JSON format errors are detected.
74 76
75 77 """
76 errors = validate(nb)
77 if errors:
78 get_logger().error(
79 "Notebook JSON is invalid (%d errors detected during write)",
80 len(errors))
78 try:
79 validate(nb)
80 except ValidationError as e:
81 get_logger().error("Notebook JSON is invalid: %s", e)
81 82 nbjson = versions[current_nbformat].writes_json(nb, **kwargs)
82 83 return nbjson
83 84
84 85
85 86 def reads_py(s, **kwargs):
86 87 """Read a .py notebook from a string and return the NotebookNode object."""
87 88 nbf, nbm, s = parse_py(s, **kwargs)
88 89 if nbf in (2, 3):
89 90 nb = versions[nbf].to_notebook_py(s, **kwargs)
90 91 else:
91 92 raise NBFormatError('Unsupported PY nbformat version: %i' % nbf)
92 93 return nb
93 94
94 95
95 96 def writes_py(nb, **kwargs):
96 97 # nbformat 3 is the latest format that supports py
97 98 return versions[3].writes_py(nb, **kwargs)
98 99
99 100
100 101 # High level API
101 102
102 103
103 104 def reads(s, format, **kwargs):
104 105 """Read a notebook from a string and return the NotebookNode object.
105 106
106 107 This function properly handles notebooks of any version. The notebook
107 108 returned will always be in the current version's format.
108 109
109 110 Parameters
110 111 ----------
111 112 s : unicode
112 113 The raw unicode string to read the notebook from.
113 114 format : (u'json', u'ipynb', u'py')
114 115 The format that the string is in.
115 116
116 117 Returns
117 118 -------
118 119 nb : NotebookNode
119 120 The notebook that was read.
120 121 """
121 122 format = unicode_type(format)
122 123 if format == u'json' or format == u'ipynb':
123 124 return reads_json(s, **kwargs)
124 125 elif format == u'py':
125 126 return reads_py(s, **kwargs)
126 127 else:
127 128 raise NBFormatError('Unsupported format: %s' % format)
128 129
129 130
130 131 def writes(nb, format, **kwargs):
131 132 """Write a notebook to a string in a given format in the current nbformat version.
132 133
133 134 This function always writes the notebook in the current nbformat version.
134 135
135 136 Parameters
136 137 ----------
137 138 nb : NotebookNode
138 139 The notebook to write.
139 140 format : (u'json', u'ipynb', u'py')
140 141 The format to write the notebook in.
141 142
142 143 Returns
143 144 -------
144 145 s : unicode
145 146 The notebook string.
146 147 """
147 148 format = unicode_type(format)
148 149 if format == u'json' or format == u'ipynb':
149 150 return writes_json(nb, **kwargs)
150 151 elif format == u'py':
151 152 return writes_py(nb, **kwargs)
152 153 else:
153 154 raise NBFormatError('Unsupported format: %s' % format)
154 155
155 156
156 157 def read(fp, format, **kwargs):
157 158 """Read a notebook from a file and return the NotebookNode object.
158 159
159 160 This function properly handles notebooks of any version. The notebook
160 161 returned will always be in the current version's format.
161 162
162 163 Parameters
163 164 ----------
164 165 fp : file
165 166 Any file-like object with a read method.
166 167 format : (u'json', u'ipynb', u'py')
167 168 The format that the string is in.
168 169
169 170 Returns
170 171 -------
171 172 nb : NotebookNode
172 173 The notebook that was read.
173 174 """
174 175 return reads(fp.read(), format, **kwargs)
175 176
176 177
177 178 def write(nb, fp, format, **kwargs):
178 179 """Write a notebook to a file in a given format in the current nbformat version.
179 180
180 181 This function always writes the notebook in the current nbformat version.
181 182
182 183 Parameters
183 184 ----------
184 185 nb : NotebookNode
185 186 The notebook to write.
186 187 fp : file
187 188 Any file-like object with a write method.
188 189 format : (u'json', u'ipynb', u'py')
189 190 The format to write the notebook in.
190 191
191 192 Returns
192 193 -------
193 194 s : unicode
194 195 The notebook string.
195 196 """
196 197 return fp.write(writes(nb, format, **kwargs))
197 198
198 199 def _convert_to_metadata():
199 200 """Convert to a notebook having notebook metadata."""
200 201 import glob
201 202 for fname in glob.glob('*.ipynb'):
202 203 print('Converting file:',fname)
203 204 with open(fname,'r') as f:
204 205 nb = read(f,u'json')
205 206 md = new_metadata()
206 207 if u'name' in nb:
207 208 md.name = nb.name
208 209 del nb[u'name']
209 210 nb.metadata = md
210 211 with open(fname,'w') as f:
211 212 write(nb, f, u'json')
212 213
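With the logger-based reporting above, read() and write() still round-trip a notebook even when it fails validation; the error is logged rather than raised. A short usage sketch ('example.ipynb' is a placeholder path):

    from IPython.nbformat import current

    with open('example.ipynb') as f:
        nb = current.read(f, 'json')   # converts to the current (v3) format,
                                       # logging a message if the JSON is invalid

    # ... modify nb ...

    with open('example.ipynb', 'w') as f:
        current.write(nb, f, 'json')   # validates again (logging only) before writing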
@@ -1,73 +1,45 @@ IPython/nbformat/tests/test_validator.py
1 """
2 Contains tests class for validator.py
3 """
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2014 The IPython Development Team
6 #
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
1 """Test nbformat.validator"""
10 2
11 #-----------------------------------------------------------------------------
12 # Imports
13 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
14 5
15 6 import os
16 7
17 8 from .base import TestsBase
18 from jsonschema import SchemaError
9 from jsonschema import ValidationError
19 10 from ..current import read
20 from ..validator import schema_path, isvalid, validate, resolve_ref
11 from ..validator import isvalid, validate
21 12
22 13
23 14 #-----------------------------------------------------------------------------
24 15 # Classes and functions
25 16 #-----------------------------------------------------------------------------
26 17
27 18 class TestValidator(TestsBase):
28 19
29 def test_schema_path(self):
30 """Test that the schema path exists"""
31 self.assertEqual(os.path.exists(schema_path), True)
32
33 20 def test_nb2(self):
34 21 """Test that a v2 notebook converted to v3 passes validation"""
35 22 with self.fopen(u'test2.ipynb', u'r') as f:
36 23 nb = read(f, u'json')
37 self.assertEqual(validate(nb), [])
24 validate(nb)
38 25 self.assertEqual(isvalid(nb), True)
39 26
40 27 def test_nb3(self):
41 28 """Test that a v3 notebook passes validation"""
42 29 with self.fopen(u'test3.ipynb', u'r') as f:
43 30 nb = read(f, u'json')
44 self.assertEqual(validate(nb), [])
31 validate(nb)
45 32 self.assertEqual(isvalid(nb), True)
46 33
47 34 def test_invalid(self):
48 35 """Test than an invalid notebook does not pass validation"""
49 36 # this notebook has a few different errors:
50 37 # - the name is an integer, rather than a string
51 38 # - one cell is missing its source
52 39 # - one cell has an invalid level
53 40 with self.fopen(u'invalid.ipynb', u'r') as f:
54 41 nb = read(f, u'json')
55 self.assertEqual(len(validate(nb)), 3)
42 with self.assertRaises(ValidationError):
43 validate(nb)
56 44 self.assertEqual(isvalid(nb), False)
57 45
58 def test_resolve_ref(self):
59 """Test that references are correctly resolved"""
60 # make sure it resolves the ref correctly
61 json = {"abc": "def", "ghi": {"$ref": "/abc"}}
62 resolved = resolve_ref(json)
63 self.assertEqual(resolved, {"abc": "def", "ghi": "def"})
64
65 # make sure it throws an error if the ref is not by itself
66 json = {"abc": "def", "ghi": {"$ref": "/abc", "foo": "bar"}}
67 with self.assertRaises(SchemaError):
68 resolved = resolve_ref(json)
69
70 # make sure it can handle json with no reference
71 json = {"abc": "def"}
72 resolved = resolve_ref(json)
73 self.assertEqual(resolved, json)
@@ -1,205 +1,205 @@ IPython/nbformat/v3/nbbase.py
1 1 """The basic dict based notebook format.
2 2
3 3 The Python representation of a notebook is a nested structure of
4 4 dictionary subclasses that support attribute access
5 5 (IPython.utils.ipstruct.Struct). The functions in this module are merely
6 6 helpers to build the structs in the right form.
7 7 """
8 8
9 9 # Copyright (c) IPython Development Team.
10 10 # Distributed under the terms of the Modified BSD License.
11 11
12 12 import pprint
13 13 import uuid
14 14
15 15 from IPython.utils.ipstruct import Struct
16 16 from IPython.utils.py3compat import cast_unicode, unicode_type
17 17
18 18 #-----------------------------------------------------------------------------
19 19 # Code
20 20 #-----------------------------------------------------------------------------
21 21
22 22 # Change this when incrementing the nbformat version
23 23 nbformat = 3
24 24 nbformat_minor = 0
25 nbformat_schema = 'v3.withref.json'
25 nbformat_schema = 'nbformat.v3.schema.json'
26 26
27 27 class NotebookNode(Struct):
28 28 pass
29 29
30 30
31 31 def from_dict(d):
32 32 if isinstance(d, dict):
33 33 newd = NotebookNode()
34 34 for k,v in d.items():
35 35 newd[k] = from_dict(v)
36 36 return newd
37 37 elif isinstance(d, (tuple, list)):
38 38 return [from_dict(i) for i in d]
39 39 else:
40 40 return d
41 41
42 42
43 43 def new_output(output_type, output_text=None, output_png=None,
44 44 output_html=None, output_svg=None, output_latex=None, output_json=None,
45 45 output_javascript=None, output_jpeg=None, prompt_number=None,
46 46 ename=None, evalue=None, traceback=None, stream=None, metadata=None):
47 47 """Create a new output, to go in the ``cell.outputs`` list of a code cell.
48 48 """
49 49 output = NotebookNode()
50 50 output.output_type = unicode_type(output_type)
51 51
52 52 if metadata is None:
53 53 metadata = {}
54 54 if not isinstance(metadata, dict):
55 55 raise TypeError("metadata must be dict")
56 56 output.metadata = metadata
57 57
58 58 if output_type != 'pyerr':
59 59 if output_text is not None:
60 60 output.text = cast_unicode(output_text)
61 61 if output_png is not None:
62 62 output.png = cast_unicode(output_png)
63 63 if output_jpeg is not None:
64 64 output.jpeg = cast_unicode(output_jpeg)
65 65 if output_html is not None:
66 66 output.html = cast_unicode(output_html)
67 67 if output_svg is not None:
68 68 output.svg = cast_unicode(output_svg)
69 69 if output_latex is not None:
70 70 output.latex = cast_unicode(output_latex)
71 71 if output_json is not None:
72 72 output.json = cast_unicode(output_json)
73 73 if output_javascript is not None:
74 74 output.javascript = cast_unicode(output_javascript)
75 75
76 76 if output_type == u'pyout':
77 77 if prompt_number is not None:
78 78 output.prompt_number = int(prompt_number)
79 79
80 80 if output_type == u'pyerr':
81 81 if ename is not None:
82 82 output.ename = cast_unicode(ename)
83 83 if evalue is not None:
84 84 output.evalue = cast_unicode(evalue)
85 85 if traceback is not None:
86 86 output.traceback = [cast_unicode(frame) for frame in list(traceback)]
87 87
88 88 if output_type == u'stream':
89 89 output.stream = 'stdout' if stream is None else cast_unicode(stream)
90 90
91 91 return output
92 92
93 93
94 94 def new_code_cell(input=None, prompt_number=None, outputs=None,
95 95 language=u'python', collapsed=False, metadata=None):
96 96 """Create a new code cell with input and output"""
97 97 cell = NotebookNode()
98 98 cell.cell_type = u'code'
99 99 if language is not None:
100 100 cell.language = cast_unicode(language)
101 101 if input is not None:
102 102 cell.input = cast_unicode(input)
103 103 if prompt_number is not None:
104 104 cell.prompt_number = int(prompt_number)
105 105 if outputs is None:
106 106 cell.outputs = []
107 107 else:
108 108 cell.outputs = outputs
109 109 if collapsed is not None:
110 110 cell.collapsed = bool(collapsed)
111 111 cell.metadata = NotebookNode(metadata or {})
112 112
113 113 return cell
114 114
115 115 def new_text_cell(cell_type, source=None, rendered=None, metadata=None):
116 116 """Create a new text cell."""
117 117 cell = NotebookNode()
118 118 # VERSIONHACK: plaintext -> raw
119 119 # handle never-released plaintext name for raw cells
120 120 if cell_type == 'plaintext':
121 121 cell_type = 'raw'
122 122 if source is not None:
123 123 cell.source = cast_unicode(source)
124 124 if rendered is not None:
125 125 cell.rendered = cast_unicode(rendered)
126 126 cell.metadata = NotebookNode(metadata or {})
127 127 cell.cell_type = cell_type
128 128 return cell
129 129
130 130
131 131 def new_heading_cell(source=None, rendered=None, level=1, metadata=None):
132 132 """Create a new section cell with a given integer level."""
133 133 cell = NotebookNode()
134 134 cell.cell_type = u'heading'
135 135 if source is not None:
136 136 cell.source = cast_unicode(source)
137 137 if rendered is not None:
138 138 cell.rendered = cast_unicode(rendered)
139 139 cell.level = int(level)
140 140 cell.metadata = NotebookNode(metadata or {})
141 141 return cell
142 142
143 143
144 144 def new_worksheet(name=None, cells=None, metadata=None):
145 145 """Create a worksheet by name with with a list of cells."""
146 146 ws = NotebookNode()
147 147 if name is not None:
148 148 ws.name = cast_unicode(name)
149 149 if cells is None:
150 150 ws.cells = []
151 151 else:
152 152 ws.cells = list(cells)
153 153 ws.metadata = NotebookNode(metadata or {})
154 154 return ws
155 155
156 156
157 157 def new_notebook(name=None, metadata=None, worksheets=None):
158 158 """Create a notebook by name, id and a list of worksheets."""
159 159 nb = NotebookNode()
160 160 nb.nbformat = nbformat
161 161 nb.nbformat_minor = nbformat_minor
162 162 if worksheets is None:
163 163 nb.worksheets = []
164 164 else:
165 165 nb.worksheets = list(worksheets)
166 166 if metadata is None:
167 167 nb.metadata = new_metadata()
168 168 else:
169 169 nb.metadata = NotebookNode(metadata)
170 170 if name is not None:
171 171 nb.metadata.name = cast_unicode(name)
172 172 return nb
173 173
174 174
175 175 def new_metadata(name=None, authors=None, license=None, created=None,
176 176 modified=None, gistid=None):
177 177 """Create a new metadata node."""
178 178 metadata = NotebookNode()
179 179 if name is not None:
180 180 metadata.name = cast_unicode(name)
181 181 if authors is not None:
182 182 metadata.authors = list(authors)
183 183 if created is not None:
184 184 metadata.created = cast_unicode(created)
185 185 if modified is not None:
186 186 metadata.modified = cast_unicode(modified)
187 187 if license is not None:
188 188 metadata.license = cast_unicode(license)
189 189 if gistid is not None:
190 190 metadata.gistid = cast_unicode(gistid)
191 191 return metadata
192 192
193 193 def new_author(name=None, email=None, affiliation=None, url=None):
194 194 """Create a new author."""
195 195 author = NotebookNode()
196 196 if name is not None:
197 197 author.name = cast_unicode(name)
198 198 if email is not None:
199 199 author.email = cast_unicode(email)
200 200 if affiliation is not None:
201 201 author.affiliation = cast_unicode(affiliation)
202 202 if url is not None:
203 203 author.url = cast_unicode(url)
204 204 return author
205 205
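The helpers above can be combined to build a notebook programmatically; a sketch using only the constructors shown in this file, finished with the new exception-raising validate():

    from IPython.nbformat.v3.nbbase import (
        new_code_cell, new_metadata, new_notebook, new_output, new_worksheet,
    )
    from IPython.nbformat.validator import validate

    out = new_output(output_type='pyout', output_text='4', prompt_number=1)
    cell = new_code_cell(input='2 + 2', prompt_number=1, outputs=[out])
    ws = new_worksheet(cells=[cell])
    nb = new_notebook(name='example', worksheets=[ws], metadata=new_metadata())

    validate(nb)  # should pass: the constructors set all required keys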
@@ -1,112 +1,72 @@ IPython/nbformat/validator.py
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
3
1 4 from __future__ import print_function
2 5 import json
3 6 import os
4 7
5 8 try:
6 from jsonschema import SchemaError
7 from jsonschema import Draft3Validator as Validator
9 from jsonschema import ValidationError
10 from jsonschema import Draft4Validator as Validator
8 11 except ImportError as e:
9 12 verbose_msg = """
10 13
11 IPython depends on the jsonschema package: https://pypi.python.org/pypi/jsonschema
14 IPython notebook format depends on the jsonschema package:
12 15
13 Please install it first.
14 """
15 raise ImportError(str(e) + verbose_msg)
16
17 try:
18 import jsonpointer as jsonpointer
19 except ImportError as e:
20 verbose_msg = """
21
22 IPython depends on the jsonpointer package: https://pypi.python.org/pypi/jsonpointer
16 https://pypi.python.org/pypi/jsonschema
23 17
24 18 Please install it first.
25 19 """
26 20 raise ImportError(str(e) + verbose_msg)
27 21
28 from IPython.utils.py3compat import iteritems
22 from IPython.utils.importstring import import_item
23
29 24
25 validators = {}
30 26
31 from .current import nbformat, nbformat_schema
32 schema_path = os.path.join(
33 os.path.dirname(__file__), "v%d" % nbformat, nbformat_schema)
27 def get_validator(version=None):
28 """Load the JSON schema into a Validator"""
29 if version is None:
30 from .current import nbformat as version
34 31
32 if version not in validators:
33 v = import_item("IPython.nbformat.v%s" % version)
34 schema_path = os.path.join(os.path.dirname(v.__file__), v.nbformat_schema)
35 with open(schema_path) as f:
36 schema_json = json.load(f)
37 validators[version] = Validator(schema_json)
38 return validators[version]
35 39
36 def isvalid(nbjson):
40 def isvalid(nbjson, ref=None, version=None):
37 41 """Checks whether the given notebook JSON conforms to the current
38 42 notebook format schema. Returns True if the JSON is valid, and
39 43 False otherwise.
40 44
41 45 To see the individual errors that were encountered, please use the
42 46 `validate` function instead.
43
44 47 """
45
46 errors = validate(nbjson)
47 return errors == []
48 try:
49 validate(nbjson, ref, version)
50 except ValidationError:
51 return False
52 else:
53 return True
48 54
49 55
50 def validate(nbjson):
56 def validate(nbjson, ref=None, version=None):
51 57 """Checks whether the given notebook JSON conforms to the current
52 notebook format schema, and returns the list of errors.
53
54 """
55
56 # load the schema file
57 with open(schema_path, 'r') as fh:
58 schema_json = json.load(fh)
59
60 # resolve internal references
61 schema = resolve_ref(schema_json)
62 schema = jsonpointer.resolve_pointer(schema, '/notebook')
63
64 # count how many errors there are
65 v = Validator(schema)
66 errors = list(v.iter_errors(nbjson))
67 return errors
68
69
70 def resolve_ref(json, schema=None):
71 """Resolve internal references within the given JSON. This essentially
72 means that dictionaries of this form:
73
74 {"$ref": "/somepointer"}
75
76 will be replaced with the resolved reference to `/somepointer`.
77 This only supports local reference to the same JSON file.
58 notebook format schema.
78 59
60 Raises ValidationError if not valid.
79 61 """
62 if version is None:
63 from .current import nbformat
64 version = nbjson.get('nbformat', nbformat)
80 65
81 if not schema:
82 schema = json
83
84 # if it's a list, resolve references for each item in the list
85 if type(json) is list:
86 resolved = []
87 for item in json:
88 resolved.append(resolve_ref(item, schema=schema))
89
90 # if it's a dictionary, resolve references for each item in the
91 # dictionary
92 elif type(json) is dict:
93 resolved = {}
94 for key, ref in iteritems(json):
95
96 # if the key is equal to $ref, then replace the entire
97 # dictionary with the resolved value
98 if key == '$ref':
99 if len(json) != 1:
100 raise SchemaError(
101 "objects containing a $ref should only have one item")
102 pointer = jsonpointer.resolve_pointer(schema, ref)
103 resolved = resolve_ref(pointer, schema=schema)
104
105 else:
106 resolved[key] = resolve_ref(ref, schema=schema)
66 validator = get_validator(version)
107 67
108 # otherwise it's a normal object, so just return it
68 if ref:
69 return validator.validate(nbjson, {'$ref' : '#/definitions/%s' % ref})
109 70 else:
110 resolved = json
71 return validator.validate(nbjson)
111 72
112 return resolved
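The rewritten validator builds one Draft4Validator per nbformat version and caches it, and the new ref argument validates a fragment against a single named definition from the schema. A sketch, assuming nb is a v3 notebook whose first cell is a code cell:

    from IPython.nbformat.validator import get_validator, validate

    validate(nb, version=3)                       # whole notebook against the v3 schema

    cell = nb['worksheets'][0]['cells'][0]
    validate(cell, ref='code_cell', version=3)    # one fragment against #/definitions/code_cell

    assert get_validator(3) is get_validator(3)   # validators are cached per version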
@@ -1,680 +1,680 @@ IPython/testing/iptestcontroller.py
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Test Process Controller
3 3
4 4 This module runs one or more subprocesses which will actually run the IPython
5 5 test suite.
6 6
7 7 """
8 8
9 9 # Copyright (c) IPython Development Team.
10 10 # Distributed under the terms of the Modified BSD License.
11 11
12 12 from __future__ import print_function
13 13
14 14 import argparse
15 15 import json
16 16 import multiprocessing.pool
17 17 import os
18 18 import shutil
19 19 import signal
20 20 import sys
21 21 import subprocess
22 22 import time
23 23 import re
24 24
25 25 from .iptest import have, test_group_names as py_test_group_names, test_sections, StreamCapturer
26 26 from IPython.utils.path import compress_user
27 27 from IPython.utils.py3compat import bytes_to_str
28 28 from IPython.utils.sysinfo import get_sys_info
29 29 from IPython.utils.tempdir import TemporaryDirectory
30 30 from IPython.utils.text import strip_ansi
31 31
32 32 try:
33 33 # Python >= 3.3
34 34 from subprocess import TimeoutExpired
35 35 def popen_wait(p, timeout):
36 36 return p.wait(timeout)
37 37 except ImportError:
38 38 class TimeoutExpired(Exception):
39 39 pass
40 40 def popen_wait(p, timeout):
41 41 """backport of Popen.wait from Python 3"""
42 42 for i in range(int(10 * timeout)):
43 43 if p.poll() is not None:
44 44 return
45 45 time.sleep(0.1)
46 46 if p.poll() is None:
47 47 raise TimeoutExpired
48 48
49 49 NOTEBOOK_SHUTDOWN_TIMEOUT = 10
50 50
51 51 class TestController(object):
52 52 """Run tests in a subprocess
53 53 """
54 54 #: str, IPython test suite to be executed.
55 55 section = None
56 56 #: list, command line arguments to be executed
57 57 cmd = None
58 58 #: dict, extra environment variables to set for the subprocess
59 59 env = None
60 60 #: list, TemporaryDirectory instances to clear up when the process finishes
61 61 dirs = None
62 62 #: subprocess.Popen instance
63 63 process = None
64 64 #: str, process stdout+stderr
65 65 stdout = None
66 66
67 67 def __init__(self):
68 68 self.cmd = []
69 69 self.env = {}
70 70 self.dirs = []
71 71
72 72 def setup(self):
73 73 """Create temporary directories etc.
74 74
75 75 This is only called when we know the test group will be run. Things
76 76 created here may be cleaned up by self.cleanup().
77 77 """
78 78 pass
79 79
80 80 def launch(self, buffer_output=False):
81 81 # print('*** ENV:', self.env) # dbg
82 82 # print('*** CMD:', self.cmd) # dbg
83 83 env = os.environ.copy()
84 84 env.update(self.env)
85 85 output = subprocess.PIPE if buffer_output else None
86 86 stdout = subprocess.STDOUT if buffer_output else None
87 87 self.process = subprocess.Popen(self.cmd, stdout=output,
88 88 stderr=stdout, env=env)
89 89
90 90 def wait(self):
91 91 self.stdout, _ = self.process.communicate()
92 92 return self.process.returncode
93 93
94 94 def print_extra_info(self):
95 95 """Print extra information about this test run.
96 96
97 97 If we're running in parallel and showing the concise view, this is only
98 98 called if the test group fails. Otherwise, it's called before the test
99 99 group is started.
100 100
101 101 The base implementation does nothing, but it can be overridden by
102 102 subclasses.
103 103 """
104 104 return
105 105
106 106 def cleanup_process(self):
107 107 """Cleanup on exit by killing any leftover processes."""
108 108 subp = self.process
109 109 if subp is None or (subp.poll() is not None):
110 110 return # Process doesn't exist, or is already dead.
111 111
112 112 try:
113 113 print('Cleaning up stale PID: %d' % subp.pid)
114 114 subp.kill()
115 115 except: # (OSError, WindowsError) ?
116 116 # This is just a best effort, if we fail or the process was
117 117 # really gone, ignore it.
118 118 pass
119 119 else:
120 120 for i in range(10):
121 121 if subp.poll() is None:
122 122 time.sleep(0.1)
123 123 else:
124 124 break
125 125
126 126 if subp.poll() is None:
127 127 # The process did not die...
128 128 print('... failed. Manual cleanup may be required.')
129 129
130 130 def cleanup(self):
131 131 "Kill process if it's still alive, and clean up temporary directories"
132 132 self.cleanup_process()
133 133 for td in self.dirs:
134 134 td.cleanup()
135 135
136 136 __del__ = cleanup
137 137
138 138
139 139 class PyTestController(TestController):
140 140 """Run Python tests using IPython.testing.iptest"""
141 141 #: str, Python command to execute in subprocess
142 142 pycmd = None
143 143
144 144 def __init__(self, section, options):
145 145 """Create new test runner."""
146 146 TestController.__init__(self)
147 147 self.section = section
148 148 # pycmd is put into cmd[2] in PyTestController.launch()
149 149 self.cmd = [sys.executable, '-c', None, section]
150 150 self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
151 151 self.options = options
152 152
153 153 def setup(self):
154 154 ipydir = TemporaryDirectory()
155 155 self.dirs.append(ipydir)
156 156 self.env['IPYTHONDIR'] = ipydir.name
157 157 self.workingdir = workingdir = TemporaryDirectory()
158 158 self.dirs.append(workingdir)
159 159 self.env['IPTEST_WORKING_DIR'] = workingdir.name
160 160 # This means we won't get odd effects from our own matplotlib config
161 161 self.env['MPLCONFIGDIR'] = workingdir.name
162 162
163 163 # From options:
164 164 if self.options.xunit:
165 165 self.add_xunit()
166 166 if self.options.coverage:
167 167 self.add_coverage()
168 168 self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
169 169 self.cmd.extend(self.options.extra_args)
170 170
171 171 @property
172 172 def will_run(self):
173 173 try:
174 174 return test_sections[self.section].will_run
175 175 except KeyError:
176 176 return True
177 177
178 178 def add_xunit(self):
179 179 xunit_file = os.path.abspath(self.section + '.xunit.xml')
180 180 self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
181 181
182 182 def add_coverage(self):
183 183 try:
184 184 sources = test_sections[self.section].includes
185 185 except KeyError:
186 186 sources = ['IPython']
187 187
188 188 coverage_rc = ("[run]\n"
189 189 "data_file = {data_file}\n"
190 190 "source =\n"
191 191 " {source}\n"
192 192 ).format(data_file=os.path.abspath('.coverage.'+self.section),
193 193 source="\n ".join(sources))
194 194 config_file = os.path.join(self.workingdir.name, '.coveragerc')
195 195 with open(config_file, 'w') as f:
196 196 f.write(coverage_rc)
197 197
198 198 self.env['COVERAGE_PROCESS_START'] = config_file
199 199 self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
200 200
201 201 def launch(self, buffer_output=False):
202 202 self.cmd[2] = self.pycmd
203 203 super(PyTestController, self).launch(buffer_output=buffer_output)
204 204
205 205
206 206 js_prefix = 'js/'
207 207
208 208 def get_js_test_dir():
209 209 import IPython.html.tests as t
210 210 return os.path.join(os.path.dirname(t.__file__), '')
211 211
212 212 def all_js_groups():
213 213 import glob
214 214 test_dir = get_js_test_dir()
215 215 all_subdirs = glob.glob(test_dir + '[!_]*/')
216 216 return [js_prefix+os.path.relpath(x, test_dir) for x in all_subdirs]
217 217
218 218 class JSController(TestController):
219 219 """Run CasperJS tests """
220 220 requirements = ['zmq', 'tornado', 'jinja2', 'casperjs', 'sqlite3',
221 'jsonschema', 'jsonpointer']
221 'jsonschema']
222 222 display_slimer_output = False
223 223
224 224 def __init__(self, section, xunit=True, engine='phantomjs'):
225 225 """Create new test runner."""
226 226 TestController.__init__(self)
227 227 self.engine = engine
228 228 self.section = section
229 229 self.xunit = xunit
230 230 self.slimer_failure = re.compile('^FAIL.*', flags=re.MULTILINE)
231 231 js_test_dir = get_js_test_dir()
232 232 includes = '--includes=' + os.path.join(js_test_dir,'util.js')
233 233 test_cases = os.path.join(js_test_dir, self.section[len(js_prefix):])
234 234 self.cmd = ['casperjs', 'test', includes, test_cases, '--engine=%s' % self.engine]
235 235
236 236 def setup(self):
237 237 self.ipydir = TemporaryDirectory()
238 238 self.nbdir = TemporaryDirectory()
239 239 self.dirs.append(self.ipydir)
240 240 self.dirs.append(self.nbdir)
241 241 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a')))
242 242 os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b')))
243 243
244 244 if self.xunit:
245 245 self.add_xunit()
246 246
247 247 # start the ipython notebook, so we get the port number
248 248 self.server_port = 0
249 249 self._init_server()
250 250 if self.server_port:
251 251 self.cmd.append("--port=%i" % self.server_port)
252 252 else:
253 253 # don't launch tests if the server didn't start
254 254 self.cmd = [sys.executable, '-c', 'raise SystemExit(1)']
255 255
256 256 def add_xunit(self):
257 257 xunit_file = os.path.abspath(self.section.replace('/','.') + '.xunit.xml')
258 258 self.cmd.append('--xunit=%s' % xunit_file)
259 259
260 260 def launch(self, buffer_output):
261 261 # If the engine is SlimerJS, we need to buffer the output because
262 262 # SlimerJS does not support exit codes, so CasperJS always returns 0.
263 263 if self.engine == 'slimerjs' and not buffer_output:
264 264 self.display_slimer_output = True
265 265 return super(JSController, self).launch(buffer_output=True)
266 266
267 267 else:
268 268 return super(JSController, self).launch(buffer_output=buffer_output)
269 269
270 270 def wait(self, *pargs, **kwargs):
271 271 """Wait for the JSController to finish"""
272 272 ret = super(JSController, self).wait(*pargs, **kwargs)
273 273 # If this is a SlimerJS controller, check the captured stdout for
274 274 # errors. Otherwise, just return the return code.
275 275 if self.engine == 'slimerjs':
276 276 stdout = bytes_to_str(self.stdout)
277 277 if self.display_slimer_output:
278 278 print(stdout)
279 279 if ret != 0:
280 280 # This could still happen e.g. if it's stopped by SIGINT
281 281 return ret
282 282 return bool(self.slimer_failure.search(strip_ansi(stdout)))
283 283 else:
284 284 return ret
285 285
286 286 def print_extra_info(self):
287 287 print("Running tests with notebook directory %r" % self.nbdir.name)
288 288
289 289 @property
290 290 def will_run(self):
291 291 return all(have[a] for a in self.requirements + [self.engine])
292 292
293 293 def _init_server(self):
294 294 "Start the notebook server in a separate process"
295 295 self.server_command = command = [sys.executable,
296 296 '-m', 'IPython.html',
297 297 '--no-browser',
298 298 '--ipython-dir', self.ipydir.name,
299 299 '--notebook-dir', self.nbdir.name,
300 300 ]
301 301 # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
302 302 # which run afoul of ipc's maximum path length.
303 303 if sys.platform.startswith('linux'):
304 304 command.append('--KernelManager.transport=ipc')
305 305 self.stream_capturer = c = StreamCapturer()
306 306 c.start()
307 307 self.server = subprocess.Popen(command, stdout=c.writefd, stderr=subprocess.STDOUT, cwd=self.nbdir.name)
308 308 self.server_info_file = os.path.join(self.ipydir.name,
309 309 'profile_default', 'security', 'nbserver-%i.json' % self.server.pid
310 310 )
311 311 self._wait_for_server()
312 312
313 313 def _wait_for_server(self):
314 314 """Wait 30 seconds for the notebook server to start"""
315 315 for i in range(300):
316 316 if self.server.poll() is not None:
317 317 return self._failed_to_start()
318 318 if os.path.exists(self.server_info_file):
319 319 try:
320 320 self._load_server_info()
321 321 except ValueError:
322 322 # If the server is halfway through writing the file, we may
323 323 # get invalid JSON; it should be ready next iteration.
324 324 pass
325 325 else:
326 326 return
327 327 time.sleep(0.1)
328 328 print("Notebook server-info file never arrived: %s" % self.server_info_file,
329 329 file=sys.stderr
330 330 )
331 331
332 332 def _failed_to_start(self):
333 333 """Notebook server exited prematurely"""
334 334 captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
335 335 print("Notebook failed to start: ", file=sys.stderr)
336 336 print(self.server_command)
337 337 print(captured, file=sys.stderr)
338 338
339 339 def _load_server_info(self):
340 340 """Notebook server started, load connection info from JSON"""
341 341 with open(self.server_info_file) as f:
342 342 info = json.load(f)
343 343 self.server_port = info['port']
344 344
345 345 def cleanup(self):
346 346 try:
347 347 self.server.terminate()
348 348 except OSError:
349 349 # already dead
350 350 pass
351 351 # wait 10s for the server to shutdown
352 352 try:
353 353 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
354 354 except TimeoutExpired:
355 355 # server didn't terminate, kill it
356 356 try:
357 357 print("Failed to terminate notebook server, killing it.",
358 358 file=sys.stderr
359 359 )
360 360 self.server.kill()
361 361 except OSError:
362 362 # already dead
363 363 pass
364 364 # wait another 10s
365 365 try:
366 366 popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
367 367 except TimeoutExpired:
368 368 print("Notebook server still running (%s)" % self.server_info_file,
369 369 file=sys.stderr
370 370 )
371 371
372 372 self.stream_capturer.halt()
373 373 TestController.cleanup(self)
374 374
375 375
376 376 def prepare_controllers(options):
377 377 """Returns two lists of TestController instances, those to run, and those
378 378 not to run."""
379 379 testgroups = options.testgroups
380 380 if testgroups:
381 381 if 'js' in testgroups:
382 382 js_testgroups = all_js_groups()
383 383 else:
384 384 js_testgroups = [g for g in testgroups if g.startswith(js_prefix)]
385 385
386 386 py_testgroups = [g for g in testgroups if g not in ['js'] + js_testgroups]
387 387 else:
388 388 py_testgroups = py_test_group_names
389 389 if not options.all:
390 390 js_testgroups = []
391 391 test_sections['parallel'].enabled = False
392 392 else:
393 393 js_testgroups = all_js_groups()
394 394
395 395 engine = 'slimerjs' if options.slimerjs else 'phantomjs'
396 396 c_js = [JSController(name, xunit=options.xunit, engine=engine) for name in js_testgroups]
397 397 c_py = [PyTestController(name, options) for name in py_testgroups]
398 398
399 399 controllers = c_py + c_js
400 400 to_run = [c for c in controllers if c.will_run]
401 401 not_run = [c for c in controllers if not c.will_run]
402 402 return to_run, not_run
403 403
404 404 def do_run(controller, buffer_output=True):
405 405 """Setup and run a test controller.
406 406
407 407 If buffer_output is True, no output is displayed, to avoid it appearing
408 408 interleaved. In this case, the caller is responsible for displaying test
409 409 output on failure.
410 410
411 411 Returns
412 412 -------
413 413 controller : TestController
414 414 The same controller as passed in, as a convenience for using map() type
415 415 APIs.
416 416 exitcode : int
417 417 The exit code of the test subprocess. Non-zero indicates failure.
418 418 """
419 419 try:
420 420 try:
421 421 controller.setup()
422 422 if not buffer_output:
423 423 controller.print_extra_info()
424 424 controller.launch(buffer_output=buffer_output)
425 425 except Exception:
426 426 import traceback
427 427 traceback.print_exc()
428 428 return controller, 1 # signal failure
429 429
430 430 exitcode = controller.wait()
431 431 return controller, exitcode
432 432
433 433 except KeyboardInterrupt:
434 434 return controller, -signal.SIGINT
435 435 finally:
436 436 controller.cleanup()
437 437
438 438 def report():
439 439 """Return a string with a summary report of test-related variables."""
440 440 inf = get_sys_info()
441 441 out = []
442 442 def _add(name, value):
443 443 out.append((name, value))
444 444
445 445 _add('IPython version', inf['ipython_version'])
446 446 _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
447 447 _add('IPython package', compress_user(inf['ipython_path']))
448 448 _add('Python version', inf['sys_version'].replace('\n',''))
449 449 _add('sys.executable', compress_user(inf['sys_executable']))
450 450 _add('Platform', inf['platform'])
451 451
452 452 width = max(len(n) for (n,v) in out)
453 453 out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
454 454
455 455 avail = []
456 456 not_avail = []
457 457
458 458 for k, is_avail in have.items():
459 459 if is_avail:
460 460 avail.append(k)
461 461 else:
462 462 not_avail.append(k)
463 463
464 464 if avail:
465 465 out.append('\nTools and libraries available at test time:\n')
466 466 avail.sort()
467 467 out.append(' ' + ' '.join(avail)+'\n')
468 468
469 469 if not_avail:
470 470 out.append('\nTools and libraries NOT available at test time:\n')
471 471 not_avail.sort()
472 472 out.append(' ' + ' '.join(not_avail)+'\n')
473 473
474 474 return ''.join(out)
475 475
476 476 def run_iptestall(options):
477 477 """Run the entire IPython test suite by calling nose and trial.
478 478
479 479 This function constructs :class:`IPTester` instances for all IPython
480 480 modules and package and then runs each of them. This causes the modules
481 481 and packages of IPython to be tested each in their own subprocess using
482 482 nose.
483 483
484 484 Parameters
485 485 ----------
486 486
487 487 All parameters are passed as attributes of the options object.
488 488
489 489 testgroups : list of str
490 490 Run only these sections of the test suite. If empty, run all the available
491 491 sections.
492 492
493 493 fast : int or None
494 494 Run the test suite in parallel, using n simultaneous processes. If None
495 495 is passed, one process is used per CPU core. Default 1 (i.e. sequential)
496 496
497 497 inc_slow : bool
498 498 Include slow tests, like IPython.parallel. By default, these tests aren't
499 499 run.
500 500
501 501 slimerjs : bool
502 502 Use slimerjs if it's installed instead of phantomjs for casperjs tests.
503 503
504 504 xunit : bool
505 505 Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
506 506
507 507 coverage : bool or str
508 508 Measure code coverage from tests. True will store the raw coverage data,
509 509 or pass 'html' or 'xml' to get reports.
510 510
511 511 extra_args : list
512 512 Extra arguments to pass to the test subprocesses, e.g. '-v'
513 513 """
514 514 to_run, not_run = prepare_controllers(options)
515 515
516 516 def justify(ltext, rtext, width=70, fill='-'):
517 517 ltext += ' '
518 518 rtext = (' ' + rtext).rjust(width - len(ltext), fill)
519 519 return ltext + rtext
520 520
521 521 # Run all test runners, tracking execution time
522 522 failed = []
523 523 t_start = time.time()
524 524
525 525 print()
526 526 if options.fast == 1:
527 527 # This actually means sequential, i.e. with 1 job
528 528 for controller in to_run:
529 529 print('Test group:', controller.section)
530 530 sys.stdout.flush() # Show in correct order when output is piped
531 531 controller, res = do_run(controller, buffer_output=False)
532 532 if res:
533 533 failed.append(controller)
534 534 if res == -signal.SIGINT:
535 535 print("Interrupted")
536 536 break
537 537 print()
538 538
539 539 else:
540 540 # Run tests concurrently
541 541 try:
542 542 pool = multiprocessing.pool.ThreadPool(options.fast)
543 543 for (controller, res) in pool.imap_unordered(do_run, to_run):
544 544 res_string = 'OK' if res == 0 else 'FAILED'
545 545 print(justify('Test group: ' + controller.section, res_string))
546 546 if res:
547 547 controller.print_extra_info()
548 548 print(bytes_to_str(controller.stdout))
549 549 failed.append(controller)
550 550 if res == -signal.SIGINT:
551 551 print("Interrupted")
552 552 break
553 553 except KeyboardInterrupt:
554 554 return
555 555
556 556 for controller in not_run:
557 557 print(justify('Test group: ' + controller.section, 'NOT RUN'))
558 558
559 559 t_end = time.time()
560 560 t_tests = t_end - t_start
561 561 nrunners = len(to_run)
562 562 nfail = len(failed)
563 563 # summarize results
564 564 print('_'*70)
565 565 print('Test suite completed for system with the following information:')
566 566 print(report())
567 567 took = "Took %.3fs." % t_tests
568 568 print('Status: ', end='')
569 569 if not failed:
570 570 print('OK (%d test groups).' % nrunners, took)
571 571 else:
572 572 # If anything went wrong, point out what command to rerun manually to
573 573 # see the actual errors and individual summary
574 574 failed_sections = [c.section for c in failed]
575 575 print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
576 576 nrunners, ', '.join(failed_sections)), took)
577 577 print()
578 578 print('You may wish to rerun these, with:')
579 579 print(' iptest', *failed_sections)
580 580 print()
581 581
582 582 if options.coverage:
583 583 from coverage import coverage
584 584 cov = coverage(data_file='.coverage')
585 585 cov.combine()
586 586 cov.save()
587 587
588 588 # Coverage HTML report
589 589 if options.coverage == 'html':
590 590 html_dir = 'ipy_htmlcov'
591 591 shutil.rmtree(html_dir, ignore_errors=True)
592 592 print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
593 593 sys.stdout.flush()
594 594
595 595 # Custom HTML reporter to clean up module names.
596 596 from coverage.html import HtmlReporter
597 597 class CustomHtmlReporter(HtmlReporter):
598 598 def find_code_units(self, morfs):
599 599 super(CustomHtmlReporter, self).find_code_units(morfs)
600 600 for cu in self.code_units:
601 601 nameparts = cu.name.split(os.sep)
602 602 if 'IPython' not in nameparts:
603 603 continue
604 604 ix = nameparts.index('IPython')
605 605 cu.name = '.'.join(nameparts[ix:])
606 606
607 607 # Reimplement the html_report method with our custom reporter
608 608 cov._harvest_data()
609 609 cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
610 610 html_title='IPython test coverage',
611 611 )
612 612 reporter = CustomHtmlReporter(cov, cov.config)
613 613 reporter.report(None)
614 614 print('done.')
615 615
616 616 # Coverage XML report
617 617 elif options.coverage == 'xml':
618 618 cov.xml_report(outfile='ipy_coverage.xml')
619 619
620 620 if failed:
621 621 # Ensure that our exit code indicates failure
622 622 sys.exit(1)
623 623
624 624 argparser = argparse.ArgumentParser(description='Run IPython test suite')
625 625 argparser.add_argument('testgroups', nargs='*',
626 626 help='Run specified groups of tests. If omitted, run '
627 627 'all tests.')
628 628 argparser.add_argument('--all', action='store_true',
629 629 help='Include slow tests not run by default.')
630 630 argparser.add_argument('--slimerjs', action='store_true',
631 631 help="Use slimerjs if it's installed instead of phantomjs for casperjs tests.")
632 632 argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
633 633 help='Run test sections in parallel. This starts as many '
634 634 'processes as you have cores, or you can specify a number.')
635 635 argparser.add_argument('--xunit', action='store_true',
636 636 help='Produce Xunit XML results')
637 637 argparser.add_argument('--coverage', nargs='?', const=True, default=False,
638 638 help="Measure test coverage. Specify 'html' or "
639 639 "'xml' to get reports.")
640 640 argparser.add_argument('--subproc-streams', default='capture',
641 641 help="What to do with stdout/stderr from subprocesses. "
642 642 "'capture' (default), 'show' and 'discard' are the options.")
643 643
644 644 def default_options():
645 645 """Get an argparse Namespace object with the default arguments, to pass to
646 646 :func:`run_iptestall`.
647 647 """
648 648 options = argparser.parse_args([])
649 649 options.extra_args = []
650 650 return options
651 651
652 652 def main():
653 653 # iptest doesn't work correctly if the working directory is the
654 654 # root of the IPython source tree. Tell the user to avoid
655 655 # frustration.
656 656 if os.path.exists(os.path.join(os.getcwd(),
657 657 'IPython', 'testing', '__main__.py')):
658 658 print("Don't run iptest from the IPython source directory",
659 659 file=sys.stderr)
660 660 sys.exit(1)
661 661 # Arguments after -- should be passed through to nose. Argparse treats
662 662 # everything after -- as regular positional arguments, so we separate them
663 663 # first.
664 664 try:
665 665 ix = sys.argv.index('--')
666 666 except ValueError:
667 667 to_parse = sys.argv[1:]
668 668 extra_args = []
669 669 else:
670 670 to_parse = sys.argv[1:ix]
671 671 extra_args = sys.argv[ix+1:]
672 672
673 673 options = argparser.parse_args(to_parse)
674 674 options.extra_args = extra_args
675 675
676 676 run_iptestall(options)
677 677
678 678
679 679 if __name__ == '__main__':
680 680 main()
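For reference, a minimal sketch of driving the test controller above programmatically rather than through the iptest command line. It assumes this module is IPython.testing.iptestcontroller (as the iptest entry point elsewhere in this changeset suggests) and that run_iptestall and default_options are importable from it; the attribute names mirror the argparse options defined above.

from IPython.testing.iptestcontroller import default_options, run_iptestall

opts = default_options()       # argparse Namespace with the defaults above
opts.testgroups = ['core']     # run only the 'core' test group
opts.coverage = 'html'         # also write the HTML coverage report
opts.extra_args = ['-x']       # arguments after '--' are handed through to nose
run_iptestall(opts)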
@@ -1,355 +1,355 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """Setup script for IPython.
4 4
5 5 Under Posix environments it works like a typical setup.py script.
6 6 Under Windows, the command sdist is not supported, since IPython
7 7 requires utilities which are not available under Windows."""
8 8
9 9 #-----------------------------------------------------------------------------
10 10 # Copyright (c) 2008-2011, IPython Development Team.
11 11 # Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
12 12 # Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
13 13 # Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
14 14 #
15 15 # Distributed under the terms of the Modified BSD License.
16 16 #
17 17 # The full license is in the file COPYING.rst, distributed with this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Minimal Python version sanity check
22 22 #-----------------------------------------------------------------------------
23 23 from __future__ import print_function
24 24
25 25 import sys
26 26
27 27 # This check is also made in IPython/__init__, don't forget to update both when
28 28 # changing Python version requirements.
29 29 v = sys.version_info
30 30 if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
31 31 error = "ERROR: IPython requires Python version 2.7 or 3.3 or above."
32 32 print(error, file=sys.stderr)
33 33 sys.exit(1)
34 34
35 35 PY3 = (sys.version_info[0] >= 3)
36 36
37 37 # At least we're on the python version we need, move on.
38 38
39 39 #-------------------------------------------------------------------------------
40 40 # Imports
41 41 #-------------------------------------------------------------------------------
42 42
43 43 # Stdlib imports
44 44 import os
45 45 import shutil
46 46
47 47 from glob import glob
48 48
49 49 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
50 50 # update it when the contents of directories change.
51 51 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
52 52
53 53 from distutils.core import setup
54 54
55 55 # Our own imports
56 56 from setupbase import target_update
57 57
58 58 from setupbase import (
59 59 setup_args,
60 60 find_packages,
61 61 find_package_data,
62 62 check_package_data_first,
63 63 find_entry_points,
64 64 build_scripts_entrypt,
65 65 find_data_files,
66 66 check_for_dependencies,
67 67 git_prebuild,
68 68 check_submodule_status,
69 69 update_submodules,
70 70 require_submodules,
71 71 UpdateSubmodules,
72 72 get_bdist_wheel,
73 73 CompileCSS,
74 74 JavascriptVersion,
75 75 css_js_prerelease,
76 76 install_symlinked,
77 77 install_lib_symlink,
78 78 install_scripts_for_symlink,
79 79 unsymlink,
80 80 )
81 81 from setupext import setupext
82 82
83 83 isfile = os.path.isfile
84 84 pjoin = os.path.join
85 85
86 86 #-----------------------------------------------------------------------------
87 87 # Function definitions
88 88 #-----------------------------------------------------------------------------
89 89
90 90 def cleanup():
91 91 """Clean up the junk left around by the build process"""
92 92 if "develop" not in sys.argv and "egg_info" not in sys.argv:
93 93 try:
94 94 shutil.rmtree('ipython.egg-info')
95 95 except:
96 96 try:
97 97 os.unlink('ipython.egg-info')
98 98 except:
99 99 pass
100 100
101 101 #-------------------------------------------------------------------------------
102 102 # Handle OS specific things
103 103 #-------------------------------------------------------------------------------
104 104
105 105 if os.name in ('nt','dos'):
106 106 os_name = 'windows'
107 107 else:
108 108 os_name = os.name
109 109
110 110 # Under Windows, 'sdist' has not been supported. Now that the docs build with
111 111 # Sphinx it might work, but let's not turn it on until someone confirms that it
112 112 # actually works.
113 113 if os_name == 'windows' and 'sdist' in sys.argv:
114 114 print('The sdist command is not available under Windows. Exiting.')
115 115 sys.exit(1)
116 116
117 117 #-------------------------------------------------------------------------------
118 118 # Make sure we aren't trying to run without submodules
119 119 #-------------------------------------------------------------------------------
120 120 here = os.path.abspath(os.path.dirname(__file__))
121 121
122 122 def require_clean_submodules():
123 123 """Check on git submodules before distutils can do anything
124 124
125 125 Since distutils cannot be trusted to update the tree
126 126 after everything has been set in motion,
127 127 this is not a distutils command.
128 128 """
129 129 # PACKAGERS: Add a return here to skip checks for git submodules
130 130
131 131 # don't do anything if nothing is actually supposed to happen
132 132 for do_nothing in ('-h', '--help', '--help-commands', 'clean', 'submodule'):
133 133 if do_nothing in sys.argv:
134 134 return
135 135
136 136 status = check_submodule_status(here)
137 137
138 138 if status == "missing":
139 139 print("checking out submodules for the first time")
140 140 update_submodules(here)
141 141 elif status == "unclean":
142 142 print('\n'.join([
143 143 "Cannot build / install IPython with unclean submodules",
144 144 "Please update submodules with",
145 145 " python setup.py submodule",
146 146 "or",
147 147 " git submodule update",
148 148 "or commit any submodule changes you have made."
149 149 ]))
150 150 sys.exit(1)
151 151
152 152 require_clean_submodules()
153 153
154 154 #-------------------------------------------------------------------------------
155 155 # Things related to the IPython documentation
156 156 #-------------------------------------------------------------------------------
157 157
158 158 # update the manuals when building a source dist
159 159 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
160 160
161 161 # List of things to be updated. Each entry is a triplet of args for
162 162 # target_update()
163 163 to_update = [
164 164 # FIXME - Disabled for now: we need to redo an automatic way
165 165 # of generating the magic info inside the rst.
166 166 #('docs/magic.tex',
167 167 #['IPython/Magic.py'],
168 168 #"cd doc && ./update_magic.sh" ),
169 169
170 170 ('docs/man/ipcluster.1.gz',
171 171 ['docs/man/ipcluster.1'],
172 172 'cd docs/man && gzip -9c ipcluster.1 > ipcluster.1.gz'),
173 173
174 174 ('docs/man/ipcontroller.1.gz',
175 175 ['docs/man/ipcontroller.1'],
176 176 'cd docs/man && gzip -9c ipcontroller.1 > ipcontroller.1.gz'),
177 177
178 178 ('docs/man/ipengine.1.gz',
179 179 ['docs/man/ipengine.1'],
180 180 'cd docs/man && gzip -9c ipengine.1 > ipengine.1.gz'),
181 181
182 182 ('docs/man/ipython.1.gz',
183 183 ['docs/man/ipython.1'],
184 184 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'),
185 185
186 186 ]
187 187
188 188
189 189 [ target_update(*t) for t in to_update ]
190 190
191 191 #---------------------------------------------------------------------------
192 192 # Find all the packages, package data, and data_files
193 193 #---------------------------------------------------------------------------
194 194
195 195 packages = find_packages()
196 196 package_data = find_package_data()
197 197
198 198 data_files = find_data_files()
199 199
200 200 setup_args['packages'] = packages
201 201 setup_args['package_data'] = package_data
202 202 setup_args['data_files'] = data_files
203 203
204 204 #---------------------------------------------------------------------------
205 205 # custom distutils commands
206 206 #---------------------------------------------------------------------------
207 207 # imports here, so they are after setuptools import if there was one
208 208 from distutils.command.sdist import sdist
209 209 from distutils.command.upload import upload
210 210
211 211 class UploadWindowsInstallers(upload):
212 212
213 213 description = "Upload Windows installers to PyPI (only used from tools/release_windows.py)"
214 214 user_options = upload.user_options + [
215 215 ('files=', 'f', 'exe file (or glob) to upload')
216 216 ]
217 217 def initialize_options(self):
218 218 upload.initialize_options(self)
219 219 meta = self.distribution.metadata
220 220 base = '{name}-{version}'.format(
221 221 name=meta.get_name(),
222 222 version=meta.get_version()
223 223 )
224 224 self.files = os.path.join('dist', '%s.*.exe' % base)
225 225
226 226 def run(self):
227 227 for dist_file in glob(self.files):
228 228 self.upload_file('bdist_wininst', 'any', dist_file)
229 229
230 230 setup_args['cmdclass'] = {
231 231 'build_py': css_js_prerelease(
232 232 check_package_data_first(git_prebuild('IPython')),
233 233 strict=False),
234 234 'sdist' : css_js_prerelease(git_prebuild('IPython', sdist)),
235 235 'upload_wininst' : UploadWindowsInstallers,
236 236 'submodule' : UpdateSubmodules,
237 237 'css' : CompileCSS,
238 238 'symlink': install_symlinked,
239 239 'install_lib_symlink': install_lib_symlink,
240 240 'install_scripts_sym': install_scripts_for_symlink,
241 241 'unsymlink': unsymlink,
242 242 'jsversion' : JavascriptVersion,
243 243 }
244 244
245 245 #---------------------------------------------------------------------------
246 246 # Handle scripts, dependencies, and setuptools specific things
247 247 #---------------------------------------------------------------------------
248 248
249 249 # For some commands, use setuptools. Note that we do NOT list install here!
250 250 # If you want a setuptools-enhanced install, just run 'setupegg.py install'
251 251 needs_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm',
252 252 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel',
253 253 'egg_info', 'easy_install', 'upload', 'install_egg_info',
254 254 ))
255 255 if sys.platform == 'win32':
256 256 # Depend on setuptools for install on *Windows only*
257 257 # If we get script-installation working without setuptools,
258 258 # then we can back off, but until then use it.
259 259 # See Issue #369 on GitHub for more
260 260 needs_setuptools.add('install')
261 261
262 262 if len(needs_setuptools.intersection(sys.argv)) > 0:
263 263 import setuptools
264 264
265 265 # This dict is used for passing extra arguments that are setuptools
266 266 # specific to setup
267 267 setuptools_extra_args = {}
268 268
269 269 # setuptools requirements
270 270
271 271 extras_require = dict(
272 272 parallel = ['pyzmq>=2.1.11'],
273 273 qtconsole = ['pyzmq>=2.1.11', 'pygments'],
274 274 zmq = ['pyzmq>=2.1.11'],
275 275 doc = ['Sphinx>=1.1', 'numpydoc'],
276 276 test = ['nose>=0.10.1'],
277 277 terminal = [],
278 nbformat = ['jsonschema>=2.0', 'jsonpointer>=1.3'],
278 nbformat = ['jsonschema>=2.0'],
279 279 notebook = ['tornado>=3.1', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1'],
280 280 nbconvert = ['pygments', 'jinja2', 'mistune>=0.3.1']
281 281 )
282 282
283 283 if sys.version_info < (3, 3):
284 284 extras_require['test'].append('mock')
285 285
286 286 extras_require['notebook'].extend(extras_require['nbformat'])
287 287 extras_require['nbconvert'].extend(extras_require['nbformat'])
288 288
289 289 everything = set()
290 290 for deps in extras_require.values():
291 291 everything.update(deps)
292 292 extras_require['all'] = everything
293 293
294 294 install_requires = []
295 295
296 296 # add readline
297 297 if sys.platform == 'darwin':
298 298 if any(arg.startswith('bdist') for arg in sys.argv) or not setupext.check_for_readline():
299 299 install_requires.append('gnureadline')
300 300 elif sys.platform.startswith('win'):
301 301 extras_require['terminal'].append('pyreadline>=2.0')
302 302
303 303
304 304 if 'setuptools' in sys.modules:
305 305 # setup.py develop should check for submodules
306 306 from setuptools.command.develop import develop
307 307 setup_args['cmdclass']['develop'] = require_submodules(develop)
308 308 setup_args['cmdclass']['bdist_wheel'] = css_js_prerelease(get_bdist_wheel())
309 309
310 310 setuptools_extra_args['zip_safe'] = False
311 311 setuptools_extra_args['entry_points'] = {'console_scripts':find_entry_points()}
312 312 setup_args['extras_require'] = extras_require
313 313 requires = setup_args['install_requires'] = install_requires
314 314
315 315 # Script to be run by the windows binary installer after the default setup
316 316 # routine, to add shortcuts and similar windows-only things. Windows
317 317 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
318 318 # doesn't find them.
319 319 if 'bdist_wininst' in sys.argv:
320 320 if len(sys.argv) > 2 and \
321 321 ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
322 322 print("ERROR: bdist_wininst must be run alone. Exiting.", file=sys.stderr)
323 323 sys.exit(1)
324 324 setup_args['data_files'].append(
325 325 ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')])
326 326 setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')]
327 327 setup_args['options'] = {"bdist_wininst":
328 328 {"install_script":
329 329 "ipython_win_post_install.py"}}
330 330
331 331 else:
332 332 # If we are installing without setuptools, call this function which will
333 333 # check for dependencies and inform the user what is needed. This is
334 334 # just to make life easy for users.
335 335 for install_cmd in ('install', 'symlink'):
336 336 if install_cmd in sys.argv:
337 337 check_for_dependencies()
338 338 break
339 339 # scripts has to be a non-empty list, or install_scripts isn't called
340 340 setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()]
341 341
342 342 setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt
343 343
344 344 #---------------------------------------------------------------------------
345 345 # Do the actual setup now
346 346 #---------------------------------------------------------------------------
347 347
348 348 setup_args.update(setuptools_extra_args)
349 349
350 350 def main():
351 351 setup(**setup_args)
352 352 cleanup()
353 353
354 354 if __name__ == '__main__':
355 355 main()
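To make the extras bookkeeping above concrete, here is a small self-contained sketch of the same union logic that folds the nbformat extra into notebook/nbconvert and builds extras_require['all']; the dict literal is abbreviated and only illustrative.

# Illustrative only: mirrors the merge/union pattern in setup.py above.
extras = {
    'nbformat': ['jsonschema>=2.0'],
    'notebook': ['tornado>=3.1', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1'],
    'nbconvert': ['pygments', 'jinja2', 'mistune>=0.3.1'],
}
extras['notebook'].extend(extras['nbformat'])
extras['nbconvert'].extend(extras['nbformat'])

everything = set()
for deps in extras.values():
    everything.update(deps)
extras['all'] = sorted(everything)   # selected at install time, e.g. ipython[all]
print(extras['all'])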
@@ -1,730 +1,733 b''
1 1 # encoding: utf-8
2 2 """
3 3 This module defines the things that are used in setup.py for building IPython
4 4
5 5 This includes:
6 6
7 7 * The basic arguments to setup
8 8 * Functions for finding things like packages, package data, etc.
9 9 * A function for checking dependencies.
10 10 """
11 11
12 12 # Copyright (c) IPython Development Team.
13 13 # Distributed under the terms of the Modified BSD License.
14 14
15 15 from __future__ import print_function
16 16
17 17 import errno
18 18 import os
19 19 import sys
20 20
21 21 from distutils import log
22 22 from distutils.command.build_py import build_py
23 23 from distutils.command.build_scripts import build_scripts
24 24 from distutils.command.install import install
25 25 from distutils.command.install_scripts import install_scripts
26 26 from distutils.cmd import Command
27 27 from fnmatch import fnmatch
28 28 from glob import glob
29 29 from subprocess import check_call
30 30
31 31 from setupext import install_data_ext
32 32
33 33 #-------------------------------------------------------------------------------
34 34 # Useful globals and utility functions
35 35 #-------------------------------------------------------------------------------
36 36
37 37 # A few handy globals
38 38 isfile = os.path.isfile
39 39 pjoin = os.path.join
40 40 repo_root = os.path.dirname(os.path.abspath(__file__))
41 41
42 42 def oscmd(s):
43 43 print(">", s)
44 44 os.system(s)
45 45
46 46 # Py3 compatibility hacks, without assuming IPython itself is installed with
47 47 # the full py3compat machinery.
48 48
49 49 try:
50 50 execfile
51 51 except NameError:
52 52 def execfile(fname, globs, locs=None):
53 53 locs = locs or globs
54 54 exec(compile(open(fname).read(), fname, "exec"), globs, locs)
55 55
56 56 # A little utility we'll need below, since glob() does NOT allow you to do
57 57 # exclusion on multiple endings!
58 58 def file_doesnt_endwith(test,endings):
59 59 """Return true if test is a file and its name does NOT end with any
60 60 of the strings listed in endings."""
61 61 if not isfile(test):
62 62 return False
63 63 for e in endings:
64 64 if test.endswith(e):
65 65 return False
66 66 return True
67 67
68 68 #---------------------------------------------------------------------------
69 69 # Basic project information
70 70 #---------------------------------------------------------------------------
71 71
72 72 # release.py contains version, authors, license, url, keywords, etc.
73 73 execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())
74 74
75 75 # Create a dict with the basic information
76 76 # This dict is eventually passed to setup after additional keys are added.
77 77 setup_args = dict(
78 78 name = name,
79 79 version = version,
80 80 description = description,
81 81 long_description = long_description,
82 82 author = author,
83 83 author_email = author_email,
84 84 url = url,
85 85 download_url = download_url,
86 86 license = license,
87 87 platforms = platforms,
88 88 keywords = keywords,
89 89 classifiers = classifiers,
90 90 cmdclass = {'install_data': install_data_ext},
91 91 )
92 92
93 93
94 94 #---------------------------------------------------------------------------
95 95 # Find packages
96 96 #---------------------------------------------------------------------------
97 97
98 98 def find_packages():
99 99 """
100 100 Find all of IPython's packages.
101 101 """
102 102 excludes = ['deathrow', 'quarantine']
103 103 packages = []
104 104 for dir,subdirs,files in os.walk('IPython'):
105 105 package = dir.replace(os.path.sep, '.')
106 106 if any(package.startswith('IPython.'+exc) for exc in excludes):
107 107 # package is to be excluded (e.g. deathrow)
108 108 continue
109 109 if '__init__.py' not in files:
110 110 # not a package
111 111 continue
112 112 packages.append(package)
113 113 return packages
114 114
115 115 #---------------------------------------------------------------------------
116 116 # Find package data
117 117 #---------------------------------------------------------------------------
118 118
119 119 def find_package_data():
120 120 """
121 121 Find IPython's package_data.
122 122 """
123 123 # This is not enough for these things to appear in an sdist.
124 124 # We need to muck with the MANIFEST to get this to work
125 125
126 126 # exclude components and less from the walk;
127 127 # we will build the components separately
128 128 excludes = [
129 129 pjoin('static', 'components'),
130 130 pjoin('static', '*', 'less'),
131 131 ]
132 132
133 133 # walk notebook resources:
134 134 cwd = os.getcwd()
135 135 os.chdir(os.path.join('IPython', 'html'))
136 136 static_data = []
137 137 for parent, dirs, files in os.walk('static'):
138 138 if any(fnmatch(parent, pat) for pat in excludes):
139 139 # prevent descending into subdirs
140 140 dirs[:] = []
141 141 continue
142 142 for f in files:
143 143 static_data.append(pjoin(parent, f))
144 144
145 145 components = pjoin("static", "components")
146 146 # select the components we actually need to install
147 147 # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
148 148 static_data.extend([
149 149 pjoin(components, "backbone", "backbone-min.js"),
150 150 pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
151 151 pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
152 152 pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
153 153 pjoin(components, "font-awesome", "fonts", "*.*"),
154 154 pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
155 155 pjoin(components, "highlight.js", "build", "highlight.pack.js"),
156 156 pjoin(components, "jquery", "jquery.min.js"),
157 157 pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
158 158 pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
159 159 pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
160 160 pjoin(components, "marked", "lib", "marked.js"),
161 161 pjoin(components, "requirejs", "require.js"),
162 162 pjoin(components, "underscore", "underscore-min.js"),
163 163 pjoin(components, "moment", "moment.js"),
164 164 pjoin(components, "moment", "min","moment.min.js"),
165 165 ])
166 166
167 167 # Ship all of Codemirror's CSS and JS
168 168 for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
169 169 for f in files:
170 170 if f.endswith(('.js', '.css')):
171 171 static_data.append(pjoin(parent, f))
172 172
173 173 os.chdir(os.path.join('tests',))
174 174 js_tests = glob('*.js') + glob('*/*.js')
175 175
176 176 os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
177 177 nbconvert_templates = [os.path.join(dirpath, '*.*')
178 178 for dirpath, _, _ in os.walk('templates')]
179 179
180 180 os.chdir(cwd)
181 181
182 182 package_data = {
183 183 'IPython.config.profile' : ['README*', '*/*.py'],
184 184 'IPython.core.tests' : ['*.png', '*.jpg'],
185 185 'IPython.lib.tests' : ['*.wav'],
186 186 'IPython.testing.plugin' : ['*.txt'],
187 187 'IPython.html' : ['templates/*'] + static_data,
188 188 'IPython.html.tests' : js_tests,
189 189 'IPython.qt.console' : ['resources/icon/*.svg'],
190 190 'IPython.nbconvert' : nbconvert_templates +
191 191 [
192 192 'tests/files/*.*',
193 193 'exporters/tests/files/*.*',
194 194 'preprocessors/tests/files/*.*',
195 195 ],
196 196 'IPython.nbconvert.filters' : ['marked.js'],
197 'IPython.nbformat' : ['tests/*.ipynb','v3/v3.withref.json']
197 'IPython.nbformat' : [
198 'tests/*.ipynb',
199 'v3/nbformat.v3.schema.json',
200 ]
198 201 }
199 202
200 203 return package_data
201 204
202 205
203 206 def check_package_data(package_data):
204 207 """verify that package_data globs make sense"""
205 208 print("checking package data")
206 209 for pkg, data in package_data.items():
207 210 pkg_root = pjoin(*pkg.split('.'))
208 211 for d in data:
209 212 path = pjoin(pkg_root, d)
210 213 if '*' in path:
211 214 assert len(glob(path)) > 0, "No files match pattern %s" % path
212 215 else:
213 216 assert os.path.exists(path), "Missing package data: %s" % path
214 217
215 218
216 219 def check_package_data_first(command):
217 220 """decorator for checking package_data before running a given command
218 221
219 222 Probably only needs to wrap build_py
220 223 """
221 224 class DecoratedCommand(command):
222 225 def run(self):
223 226 check_package_data(self.package_data)
224 227 command.run(self)
225 228 return DecoratedCommand
226 229
227 230
228 231 #---------------------------------------------------------------------------
229 232 # Find data files
230 233 #---------------------------------------------------------------------------
231 234
232 235 def make_dir_struct(tag,base,out_base):
233 236 """Make the directory structure of all files below a starting dir.
234 237
235 238 This is just a convenience routine to help build a nested directory
236 239 hierarchy because distutils is too stupid to do this by itself.
237 240
239 242 Returns a list of (out_path, [files]) pairs for use as data_files.
240 243 """
240 243
241 244 # we'll use these a lot below
242 245 lbase = len(base)
243 246 pathsep = os.path.sep
244 247 lpathsep = len(pathsep)
245 248
246 249 out = []
247 250 for (dirpath,dirnames,filenames) in os.walk(base):
248 251 # we need to strip out the dirpath from the base to map it to the
249 252 # output (installation) path. This requires possibly stripping the
250 253 # path separator, because otherwise pjoin will not work correctly
251 254 # (pjoin('foo/','/bar') returns '/bar').
252 255
253 256 dp_eff = dirpath[lbase:]
254 257 if dp_eff.startswith(pathsep):
255 258 dp_eff = dp_eff[lpathsep:]
256 259 # The output path must be anchored at the out_base marker
257 260 out_path = pjoin(out_base,dp_eff)
258 261 # Now we can generate the final filenames. Since os.walk only produces
259 262 # filenames, we must join back with the dirpath to get full valid file
260 263 # paths:
261 264 pfiles = [pjoin(dirpath,f) for f in filenames]
262 265 # Finally, generate the entry we need, which is a pair of (output
263 266 # path, files) for use as a data_files parameter in install_data.
264 267 out.append((out_path, pfiles))
265 268
266 269 return out
267 270
268 271
269 272 def find_data_files():
270 273 """
271 274 Find IPython's data_files.
272 275
273 276 Just man pages at this point.
274 277 """
275 278
276 279 manpagebase = pjoin('share', 'man', 'man1')
277 280
278 281 # Simple file lists can be made by hand
279 282 manpages = [f for f in glob(pjoin('docs','man','*.1.gz')) if isfile(f)]
280 283 if not manpages:
281 284 # When running from a source tree, the manpages aren't gzipped
282 285 manpages = [f for f in glob(pjoin('docs','man','*.1')) if isfile(f)]
283 286
284 287 # And assemble the entire output list
285 288 data_files = [ (manpagebase, manpages) ]
286 289
287 290 return data_files
288 291
289 292
290 293 def make_man_update_target(manpage):
291 294 """Return a target_update-compliant tuple for the given manpage.
292 295
293 296 Parameters
294 297 ----------
295 298 manpage : string
296 299 Name of the manpage; it must include the section number (trailing number).
297 300
298 301 Example
299 302 -------
300 303
301 304 >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
302 305 ('docs/man/ipython.1.gz',
303 306 ['docs/man/ipython.1'],
304 307 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
305 308 """
306 309 man_dir = pjoin('docs', 'man')
307 310 manpage_gz = manpage + '.gz'
308 311 manpath = pjoin(man_dir, manpage)
309 312 manpath_gz = pjoin(man_dir, manpage_gz)
310 313 gz_cmd = ( "cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s" %
311 314 locals() )
312 315 return (manpath_gz, [manpath], gz_cmd)
313 316
314 317 # The two functions below are copied from IPython.utils.path, so we don't need
315 318 # to import IPython during setup, which fails on Python 3.
316 319
317 320 def target_outdated(target,deps):
318 321 """Determine whether a target is out of date.
319 322
320 323 target_outdated(target,deps) -> 1/0
321 324
322 325 deps: list of filenames which MUST exist.
323 326 target: single filename which may or may not exist.
324 327
325 328 If target doesn't exist or is older than any file listed in deps, return
326 329 true, otherwise return false.
327 330 """
328 331 try:
329 332 target_time = os.path.getmtime(target)
330 333 except os.error:
331 334 return 1
332 335 for dep in deps:
333 336 dep_time = os.path.getmtime(dep)
334 337 if dep_time > target_time:
335 338 #print "For target",target,"Dep failed:",dep # dbg
336 339 #print "times (dep,tar):",dep_time,target_time # dbg
337 340 return 1
338 341 return 0
339 342
340 343
341 344 def target_update(target,deps,cmd):
342 345 """Update a target with a given command given a list of dependencies.
343 346
344 347 target_update(target,deps,cmd) -> runs cmd if target is outdated.
345 348
346 349 This is just a wrapper around target_outdated() which calls the given
347 350 command if target is outdated."""
348 351
349 352 if target_outdated(target,deps):
350 353 os.system(cmd)
351 354
352 355 #---------------------------------------------------------------------------
353 356 # Find scripts
354 357 #---------------------------------------------------------------------------
355 358
356 359 def find_entry_points():
357 360 """Find IPython's scripts.
358 361
359 362 Returns setuptools entry_point-style definitions for IPython's
360 363 console scripts, rather than file paths of plain scripts.
361 364
362 365 Each entry point is emitted twice: once without a suffix and once
363 366 with the Python major version appended as a suffix, so that the
364 367 Python 3 scripts get named "ipython3" etc.
365 368
366 369 """
367 370 ep = [
368 371 'ipython%s = IPython:start_ipython',
369 372 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
370 373 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
371 374 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
372 375 'iptest%s = IPython.testing.iptestcontroller:main',
373 376 ]
374 377 suffix = str(sys.version_info[0])
375 378 return [e % '' for e in ep] + [e % suffix for e in ep]
376 379
377 380 script_src = """#!{executable}
378 381 # This script was automatically generated by setup.py
379 382 if __name__ == '__main__':
380 383 from {mod} import {func}
381 384 {func}()
382 385 """
383 386
384 387 class build_scripts_entrypt(build_scripts):
385 388 def run(self):
386 389 self.mkpath(self.build_dir)
387 390 outfiles = []
388 391 for script in find_entry_points():
389 392 name, entrypt = script.split('=')
390 393 name = name.strip()
391 394 entrypt = entrypt.strip()
392 395 outfile = os.path.join(self.build_dir, name)
393 396 outfiles.append(outfile)
394 397 print('Writing script to', outfile)
395 398
396 399 mod, func = entrypt.split(':')
397 400 with open(outfile, 'w') as f:
398 401 f.write(script_src.format(executable=sys.executable,
399 402 mod=mod, func=func))
400 403
401 404 return outfiles, outfiles
402 405
403 406 class install_lib_symlink(Command):
404 407 user_options = [
405 408 ('install-dir=', 'd', "directory to install to"),
406 409 ]
407 410
408 411 def initialize_options(self):
409 412 self.install_dir = None
410 413
411 414 def finalize_options(self):
412 415 self.set_undefined_options('symlink',
413 416 ('install_lib', 'install_dir'),
414 417 )
415 418
416 419 def run(self):
417 420 if sys.platform == 'win32':
418 421 raise Exception("This doesn't work on Windows.")
419 422 pkg = os.path.join(os.getcwd(), 'IPython')
420 423 dest = os.path.join(self.install_dir, 'IPython')
421 424 if os.path.islink(dest):
422 425 print('removing existing symlink at %s' % dest)
423 426 os.unlink(dest)
424 427 print('symlinking %s -> %s' % (pkg, dest))
425 428 os.symlink(pkg, dest)
426 429
427 430 class unsymlink(install):
428 431 def run(self):
429 432 dest = os.path.join(self.install_lib, 'IPython')
430 433 if os.path.islink(dest):
431 434 print('removing symlink at %s' % dest)
432 435 os.unlink(dest)
433 436 else:
434 437 print('No symlink exists at %s' % dest)
435 438
436 439 class install_symlinked(install):
437 440 def run(self):
438 441 if sys.platform == 'win32':
439 442 raise Exception("This doesn't work on Windows.")
440 443
441 444 # Run all sub-commands (at least those that need to be run)
442 445 for cmd_name in self.get_sub_commands():
443 446 self.run_command(cmd_name)
444 447
445 448 # 'sub_commands': a list of commands this command might have to run to
446 449 # get its work done. See cmd.py for more info.
447 450 sub_commands = [('install_lib_symlink', lambda self:True),
448 451 ('install_scripts_sym', lambda self:True),
449 452 ]
450 453
451 454 class install_scripts_for_symlink(install_scripts):
452 455 """Redefined to get options from 'symlink' instead of 'install'.
453 456
454 457 I love distutils almost as much as I love setuptools.
455 458 """
456 459 def finalize_options(self):
457 460 self.set_undefined_options('build', ('build_scripts', 'build_dir'))
458 461 self.set_undefined_options('symlink',
459 462 ('install_scripts', 'install_dir'),
460 463 ('force', 'force'),
461 464 ('skip_build', 'skip_build'),
462 465 )
463 466
464 467 #---------------------------------------------------------------------------
465 468 # Verify all dependencies
466 469 #---------------------------------------------------------------------------
467 470
468 471 def check_for_dependencies():
469 472 """Check for IPython's dependencies.
470 473
471 474 This function should NOT be called if running under setuptools!
472 475 """
473 476 from setupext.setupext import (
474 477 print_line, print_raw, print_status,
475 478 check_for_sphinx, check_for_pygments,
476 479 check_for_nose, check_for_pexpect,
477 480 check_for_pyzmq, check_for_readline,
478 481 check_for_jinja2, check_for_tornado
479 482 )
480 483 print_line()
481 484 print_raw("BUILDING IPYTHON")
482 485 print_status('python', sys.version)
483 486 print_status('platform', sys.platform)
484 487 if sys.platform == 'win32':
485 488 print_status('Windows version', sys.getwindowsversion())
486 489
487 490 print_raw("")
488 491 print_raw("OPTIONAL DEPENDENCIES")
489 492
490 493 check_for_sphinx()
491 494 check_for_pygments()
492 495 check_for_nose()
493 496 if os.name == 'posix':
494 497 check_for_pexpect()
495 498 check_for_pyzmq()
496 499 check_for_tornado()
497 500 check_for_readline()
498 501 check_for_jinja2()
499 502
500 503 #---------------------------------------------------------------------------
501 504 # VCS related
502 505 #---------------------------------------------------------------------------
503 506
504 507 # utils.submodule has checks for submodule status
505 508 execfile(pjoin('IPython','utils','submodule.py'), globals())
506 509
507 510 class UpdateSubmodules(Command):
508 511 """Update git submodules
509 512
510 513 IPython's external javascript dependencies live in a separate repo.
511 514 """
512 515 description = "Update git submodules"
513 516 user_options = []
514 517
515 518 def initialize_options(self):
516 519 pass
517 520
518 521 def finalize_options(self):
519 522 pass
520 523
521 524 def run(self):
522 525 failure = False
523 526 try:
524 527 self.spawn('git submodule init'.split())
525 528 self.spawn('git submodule update --recursive'.split())
526 529 except Exception as e:
527 530 failure = e
528 531 print(e)
529 532
530 533 if not check_submodule_status(repo_root) == 'clean':
531 534 print("submodules could not be checked out")
532 535 sys.exit(1)
533 536
534 537
535 538 def git_prebuild(pkg_dir, build_cmd=build_py):
536 539 """Return extended build or sdist command class for recording commit
537 540
538 541 records git commit in IPython.utils._sysinfo.commit
539 542
540 543 for use in IPython.utils.sysinfo.sys_info() calls after installation.
541 544
542 545 Also ensures that submodules exist prior to running
543 546 """
544 547
545 548 class MyBuildPy(build_cmd):
546 549 ''' Subclass to write commit data into installation tree '''
547 550 def run(self):
548 551 build_cmd.run(self)
549 552 # this one will only fire for build commands
550 553 if hasattr(self, 'build_lib'):
551 554 self._record_commit(self.build_lib)
552 555
553 556 def make_release_tree(self, base_dir, files):
554 557 # this one will fire for sdist
555 558 build_cmd.make_release_tree(self, base_dir, files)
556 559 self._record_commit(base_dir)
557 560
558 561 def _record_commit(self, base_dir):
559 562 import subprocess
560 563 proc = subprocess.Popen('git rev-parse --short HEAD',
561 564 stdout=subprocess.PIPE,
562 565 stderr=subprocess.PIPE,
563 566 shell=True)
564 567 repo_commit, _ = proc.communicate()
565 568 repo_commit = repo_commit.strip().decode("ascii")
566 569
567 570 out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
568 571 if os.path.isfile(out_pth) and not repo_commit:
569 572 # nothing to write, don't clobber
570 573 return
571 574
572 575 print("writing git commit '%s' to %s" % (repo_commit, out_pth))
573 576
574 577 # remove to avoid overwriting original via hard link
575 578 try:
576 579 os.remove(out_pth)
577 580 except (IOError, OSError):
578 581 pass
579 582 with open(out_pth, 'w') as out_file:
580 583 out_file.writelines([
581 584 '# GENERATED BY setup.py\n',
582 585 'commit = u"%s"\n' % repo_commit,
583 586 ])
584 587 return require_submodules(MyBuildPy)
585 588
586 589
587 590 def require_submodules(command):
588 591 """decorator for instructing a command to check for submodules before running"""
589 592 class DecoratedCommand(command):
590 593 def run(self):
591 594 if not check_submodule_status(repo_root) == 'clean':
592 595 print("submodules missing! Run `setup.py submodule` and try again")
593 596 sys.exit(1)
594 597 command.run(self)
595 598 return DecoratedCommand
596 599
597 600 #---------------------------------------------------------------------------
598 601 # bdist related
599 602 #---------------------------------------------------------------------------
600 603
601 604 def get_bdist_wheel():
602 605 """Construct bdist_wheel command for building wheels
603 606
604 607 Constructs py2-none-any tag, instead of py2.7-none-any
605 608 """
606 609 class RequiresWheel(Command):
607 610 description = "Dummy command for missing bdist_wheel"
608 611 user_options = []
609 612
610 613 def initialize_options(self):
611 614 pass
612 615
613 616 def finalize_options(self):
614 617 pass
615 618
616 619 def run(self):
617 620 print("bdist_wheel requires the wheel package")
618 621 sys.exit(1)
619 622
620 623 if 'setuptools' not in sys.modules:
621 624 return RequiresWheel
622 625 else:
623 626 try:
624 627 from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
625 628 except ImportError:
626 629 return RequiresWheel
627 630
628 631 class bdist_wheel_tag(bdist_wheel):
629 632
630 633 def add_requirements(self, metadata_path):
631 634 """transform platform-dependent requirements"""
632 635 pkg_info = read_pkg_info(metadata_path)
633 636 # pkg_info is an email.Message object (?!)
634 637 # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
635 638 # and transform them to conditionals
636 639 requires = pkg_info.get_all('Requires-Dist')
637 640 del pkg_info['Requires-Dist']
638 641 def _remove_startswith(lis, prefix):
639 642 """like list.remove, but with startswith instead of =="""
640 643 found = False
641 644 for idx, item in enumerate(lis):
642 645 if item.startswith(prefix):
643 646 found = True
644 647 break
645 648 if found:
646 649 lis.pop(idx)
647 650
648 651 for pkg in ("gnureadline", "pyreadline", "mock"):
649 652 _remove_startswith(requires, pkg)
650 653 requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
651 654 requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
652 655 requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
653 656 requires.append("mock; extra == 'test' and python_version < '3.3'")
654 657 for r in requires:
655 658 pkg_info['Requires-Dist'] = r
656 659 write_pkg_info(metadata_path, pkg_info)
657 660
658 661 return bdist_wheel_tag
659 662
660 663 #---------------------------------------------------------------------------
661 664 # Notebook related
662 665 #---------------------------------------------------------------------------
663 666
664 667 class CompileCSS(Command):
665 668 """Recompile Notebook CSS
666 669
667 670 Regenerate the compiled CSS from LESS sources.
668 671
669 672 Requires various dev dependencies, such as fabric and lessc.
670 673 """
671 674 description = "Recompile Notebook CSS"
672 675 user_options = [
673 676 ('minify', 'x', "minify CSS"),
674 677 ('force', 'f', "force recompilation of CSS"),
675 678 ]
676 679
677 680 def initialize_options(self):
678 681 self.minify = False
679 682 self.force = False
680 683
681 684 def finalize_options(self):
682 685 self.minify = bool(self.minify)
683 686 self.force = bool(self.force)
684 687
685 688 def run(self):
686 689 check_call([
687 690 "fab",
688 691 "css:minify=%s,force=%s" % (self.minify, self.force),
689 692 ], cwd=pjoin(repo_root, "IPython", "html"),
690 693 )
691 694
692 695
693 696 class JavascriptVersion(Command):
694 697 """write the javascript version to notebook javascript"""
695 698 description = "Write IPython version to javascript"
696 699 user_options = []
697 700
698 701 def initialize_options(self):
699 702 pass
700 703
701 704 def finalize_options(self):
702 705 pass
703 706
704 707 def run(self):
705 708 nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js")
706 709 with open(nsfile) as f:
707 710 lines = f.readlines()
708 711 with open(nsfile, 'w') as f:
709 712 for line in lines:
710 713 if line.startswith("IPython.version"):
711 714 line = 'IPython.version = "{0}";\n'.format(version)
712 715 f.write(line)
713 716
714 717
715 718 def css_js_prerelease(command, strict=True):
716 719 """decorator for building js/minified css prior to a release"""
717 720 class DecoratedCommand(command):
718 721 def run(self):
719 722 self.distribution.run_command('jsversion')
720 723 css = self.distribution.get_command_obj('css')
721 724 css.minify = True
722 725 try:
723 726 self.distribution.run_command('css')
724 727 except Exception as e:
725 728 if strict:
726 729 raise
727 730 else:
728 731 log.warn("Failed to build css sourcemaps: %s" % e)
729 732 command.run(self)
730 733 return DecoratedCommand
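A short sketch of how the decorator-style helpers above compose into a cmdclass entry, mirroring the build_py wiring in setup.py earlier in this changeset; the setupbase import path assumes this file sits at the repository root as setupbase.py.

from distutils.command.build_py import build_py
from setupbase import git_prebuild, check_package_data_first, css_js_prerelease

# git_prebuild records the current git commit into IPython/utils/_sysinfo.py,
# check_package_data_first validates the package_data globs before building,
# and css_js_prerelease rebuilds css/js first, only warning when strict=False.
build_py_cls = css_js_prerelease(
    check_package_data_first(git_prebuild('IPython', build_py)),
    strict=False,
)
cmdclass = {'build_py': build_py_cls}   # passed to setup(..., cmdclass=cmdclass)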
1 NO CONTENT: file was removed