Shim IPython.nbformat so tests pass again
Thomas Kluyver
@@ -0,0 +1,22 @@
1 """
2 Shim to maintain backwards compatibility with old IPython.nbformat imports.
3 """
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 from __future__ import print_function
8
9 # Stdlib
10 import sys
11 import types
12 from warnings import warn
13
14 warn("The `IPython.nbformat` package has been deprecated. "
15 "You should import from jupyter_nbformat instead.")
16
17 from IPython.utils.shimmodule import ShimModule
18
19 # Unconditionally insert the shim into sys.modules so that further import calls
20 # trigger the custom attribute access above
21
22 sys.modules['IPython.nbformat'] = ShimModule('nbformat', mirror='jupyter_nbformat')
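For context, a minimal usage sketch (not part of the commit) of the behaviour the shim is meant to preserve, assuming jupyter_nbformat is installed alongside IPython:

    # Hedged sketch: relies on jupyter_nbformat being importable, as the shim assumes.
    from IPython.nbformat import v4   # emits the warning above; ShimModule then
                                      # forwards the attribute lookup to jupyter_nbformat.v4
    nb = v4.new_notebook()            # old import paths keep working unchanged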
@@ -1,42 +1,33 @@
1 1 """
2 2 Contains base test class for nbformat
3 3 """
4 4 #-----------------------------------------------------------------------------
5 5 #Copyright (c) 2013, the IPython Development Team.
6 6 #
7 7 #Distributed under the terms of the Modified BSD License.
8 8 #
9 9 #The full license is in the file COPYING.txt, distributed with this software.
10 10 #-----------------------------------------------------------------------------
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Imports
14 14 #-----------------------------------------------------------------------------
15 15
16 16 import os
17 17 import unittest
18 18
19 19 import IPython
20 20
21 21 #-----------------------------------------------------------------------------
22 22 # Classes and functions
23 23 #-----------------------------------------------------------------------------
24 24
25 25 class TestsBase(unittest.TestCase):
26 26 """Base tests class."""
27 27
28 28 def fopen(self, f, mode=u'r'):
29 29 return open(os.path.join(self._get_files_path(), f), mode)
30 30
31 31
32 32 def _get_files_path(self):
33
34 #Get the relative path to this module in the IPython directory.
35 names = self.__module__.split(u'.')[1:-1]
36
37 #Build a path using the IPython directory and the relative path we just
38 #found.
39 path = IPython.__path__[0]
40 for name in names:
41 path = os.path.join(path, name)
42 return path
33 return os.path.dirname(__file__)
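A rough sketch of how a concrete test would use the `fopen` helper after this change; the fixture file name is an assumption, not something taken from the commit:

    # Illustrative only; 'example.ipynb' is a hypothetical fixture living next to the test module.
    class TestFixtureAccess(TestsBase):
        def test_can_open_fixture(self):
            # fopen() now resolves names relative to os.path.dirname(__file__)
            with self.fopen(u'example.ipynb', u'r') as f:
                self.assertTrue(f.read())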
@@ -1,91 +1,91 @@
1 1 """Code for converting notebooks to and from the v2 format."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from .nbbase import (
7 7 new_code_cell, new_text_cell, new_worksheet, new_notebook, new_output,
8 8 nbformat, nbformat_minor
9 9 )
10 10
11 from IPython.nbformat import v2
11 from jupyter_nbformat import v2
12 12
13 13 def _unbytes(obj):
14 14 """There should be no bytes objects in a notebook
15 15
16 16 v2 stores png/jpeg as b64 ascii bytes
17 17 """
18 18 if isinstance(obj, dict):
19 19 for k,v in obj.items():
20 20 obj[k] = _unbytes(v)
21 21 elif isinstance(obj, list):
22 22 for i,v in enumerate(obj):
23 23 obj[i] = _unbytes(v)
24 24 elif isinstance(obj, bytes):
25 25 # only valid bytes are b64-encoded ascii
26 26 obj = obj.decode('ascii')
27 27 return obj
28 28
29 29 def upgrade(nb, from_version=2, from_minor=0):
30 30 """Convert a notebook to v3.
31 31
32 32 Parameters
33 33 ----------
34 34 nb : NotebookNode
35 35 The Python representation of the notebook to convert.
36 36 from_version : int
37 37 The original version of the notebook to convert.
38 38 from_minor : int
39 39 The original minor version of the notebook to convert (only relevant for v >= 3).
40 40 """
41 41 if from_version == 2:
42 42 # Mark the original nbformat so consumers know it has been converted.
43 43 nb.nbformat = nbformat
44 44 nb.nbformat_minor = nbformat_minor
45 45
46 46 nb.orig_nbformat = 2
47 47 nb = _unbytes(nb)
48 48 for ws in nb['worksheets']:
49 49 for cell in ws['cells']:
50 50 cell.setdefault('metadata', {})
51 51 return nb
52 52 elif from_version == 3:
53 53 if from_minor != nbformat_minor:
54 54 nb.orig_nbformat_minor = from_minor
55 55 nb.nbformat_minor = nbformat_minor
56 56 return nb
57 57 else:
58 58 raise ValueError('Cannot convert a notebook directly from v%s to v3. ' \
59 59 'Try using the IPython.nbformat.convert module.' % from_version)
60 60
61 61
62 62 def heading_to_md(cell):
63 63 """turn heading cell into corresponding markdown"""
64 64 cell.cell_type = "markdown"
65 65 level = cell.pop('level', 1)
66 66 cell.source = '#'*level + ' ' + cell.source
67 67
68 68
69 69 def raw_to_md(cell):
70 70 """let raw passthrough as markdown"""
71 71 cell.cell_type = "markdown"
72 72
73 73
74 74 def downgrade(nb):
75 75 """Convert a v3 notebook to v2.
76 76
77 77 Parameters
78 78 ----------
79 79 nb : NotebookNode
80 80 The Python representation of the notebook to convert.
81 81 """
82 82 if nb.nbformat != 3:
83 83 return nb
84 84 nb.nbformat = 2
85 85 for ws in nb.worksheets:
86 86 for cell in ws.cells:
87 87 if cell.cell_type == 'heading':
88 88 heading_to_md(cell)
89 89 elif cell.cell_type == 'raw':
90 90 raw_to_md(cell)
91 91 return nb
\ No newline at end of file
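For reference, a self-contained sketch of two helpers from this file: `_unbytes` is a pure function and can be exercised directly, while `heading_to_md` is shown against a plain attribute-dict stand-in for NotebookNode. The helper bodies are copied from the diff above; the stand-in class and sample data are illustrative assumptions:

    # Illustrative sketch; AttrDict is a stand-in for the real NotebookNode class.
    def _unbytes(obj):
        """Recursively decode b64-ascii bytes (copied from the module above)."""
        if isinstance(obj, dict):
            for k, v in obj.items():
                obj[k] = _unbytes(v)
        elif isinstance(obj, list):
            for i, v in enumerate(obj):
                obj[i] = _unbytes(v)
        elif isinstance(obj, bytes):
            obj = obj.decode('ascii')
        return obj

    print(_unbytes({'png': b'iVBORw0KGgo='}))   # {'png': 'iVBORw0KGgo='}

    class AttrDict(dict):
        """Minimal stand-in: a dict whose keys are also readable/writable as attributes."""
        def __getattr__(self, name):
            return self[name]
        def __setattr__(self, name, value):
            self[name] = value

    def heading_to_md(cell):
        """turn heading cell into corresponding markdown (copied from the module above)"""
        cell.cell_type = "markdown"
        level = cell.pop('level', 1)
        cell.source = '#' * level + ' ' + cell.source

    cell = AttrDict(cell_type='heading', level=2, source='Conversion notes')
    heading_to_md(cell)
    print(cell.cell_type, cell.source)           # markdown ## Conversion notes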
@@ -1,253 +1,253 @@
1 1 """Code for converting notebooks to and from v3."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import json
7 7 import re
8 8
9 9 from .nbbase import (
10 10 nbformat, nbformat_minor,
11 11 NotebookNode,
12 12 )
13 13
14 from IPython.nbformat import v3
14 from jupyter_nbformat import v3
15 15 from IPython.utils.log import get_logger
16 16
17 17 def _warn_if_invalid(nb, version):
18 18 """Log validation errors, if there are any."""
19 19 from IPython.nbformat import validate, ValidationError
20 20 try:
21 21 validate(nb, version=version)
22 22 except ValidationError as e:
23 23 get_logger().error("Notebook JSON is not valid v%i: %s", version, e)
24 24
25 25 def upgrade(nb, from_version=3, from_minor=0):
26 26 """Convert a notebook to v4.
27 27
28 28 Parameters
29 29 ----------
30 30 nb : NotebookNode
31 31 The Python representation of the notebook to convert.
32 32 from_version : int
33 33 The original version of the notebook to convert.
34 34 from_minor : int
35 35 The original minor version of the notebook to convert (only relevant for v >= 3).
36 36 """
37 37 if from_version == 3:
38 38 # Validate the notebook before conversion
39 39 _warn_if_invalid(nb, from_version)
40 40
41 41 # Mark the original nbformat so consumers know it has been converted
42 42 orig_nbformat = nb.pop('orig_nbformat', None)
43 43 nb.metadata.orig_nbformat = orig_nbformat or 3
44 44
45 45 # Mark the new format
46 46 nb.nbformat = nbformat
47 47 nb.nbformat_minor = nbformat_minor
48 48
49 49 # remove worksheet(s)
50 50 nb['cells'] = cells = []
51 51 # In the unlikely event of multiple worksheets,
52 52 # they will be flattened
53 53 for ws in nb.pop('worksheets', []):
54 54 # upgrade each cell
55 55 for cell in ws['cells']:
56 56 cells.append(upgrade_cell(cell))
57 57 # upgrade metadata
58 58 nb.metadata.pop('name', '')
59 59 nb.metadata.pop('signature', '')
60 60 # Validate the converted notebook before returning it
61 61 _warn_if_invalid(nb, nbformat)
62 62 return nb
63 63 elif from_version == 4:
64 64 # nothing to do
65 65 if from_minor != nbformat_minor:
66 66 nb.metadata.orig_nbformat_minor = from_minor
67 67 nb.nbformat_minor = nbformat_minor
68 68
69 69 return nb
70 70 else:
71 71 raise ValueError('Cannot convert a notebook directly from v%s to v4. ' \
72 72 'Try using the IPython.nbformat.convert module.' % from_version)
73 73
74 74 def upgrade_cell(cell):
75 75 """upgrade a cell from v3 to v4
76 76
77 77 heading cell:
78 78 - -> markdown heading
79 79 code cell:
80 80 - remove language metadata
81 81 - cell.input -> cell.source
82 82 - cell.prompt_number -> cell.execution_count
83 83 - update outputs
84 84 """
85 85 cell.setdefault('metadata', NotebookNode())
86 86 if cell.cell_type == 'code':
87 87 cell.pop('language', '')
88 88 if 'collapsed' in cell:
89 89 cell.metadata['collapsed'] = cell.pop('collapsed')
90 90 cell.source = cell.pop('input', '')
91 91 cell.execution_count = cell.pop('prompt_number', None)
92 92 cell.outputs = upgrade_outputs(cell.outputs)
93 93 elif cell.cell_type == 'heading':
94 94 cell.cell_type = 'markdown'
95 95 level = cell.pop('level', 1)
96 96 cell.source = u'{hashes} {single_line}'.format(
97 97 hashes='#' * level,
98 98 single_line = ' '.join(cell.get('source', '').splitlines()),
99 99 )
100 100 elif cell.cell_type == 'html':
101 101 # Technically, this exists. It will never happen in practice.
102 102 cell.cell_type = 'markdown'
103 103 return cell
104 104
105 105 def downgrade_cell(cell):
106 106 """downgrade a cell from v4 to v3
107 107
108 108 code cell:
109 109 - set cell.language
110 110 - cell.input <- cell.source
111 111 - cell.prompt_number <- cell.execution_count
112 112 - update outputs
113 113 markdown cell:
114 114 - single-line heading -> heading cell
115 115 """
116 116 if cell.cell_type == 'code':
117 117 cell.language = 'python'
118 118 cell.input = cell.pop('source', '')
119 119 cell.prompt_number = cell.pop('execution_count', None)
120 120 cell.collapsed = cell.metadata.pop('collapsed', False)
121 121 cell.outputs = downgrade_outputs(cell.outputs)
122 122 elif cell.cell_type == 'markdown':
123 123 source = cell.get('source', '')
124 124 if '\n' not in source and source.startswith('#'):
125 125 prefix, text = re.match(r'(#+)\s*(.*)', source).groups()
126 126 cell.cell_type = 'heading'
127 127 cell.source = text
128 128 cell.level = len(prefix)
129 129 return cell
130 130
131 131 _mime_map = {
132 132 "text" : "text/plain",
133 133 "html" : "text/html",
134 134 "svg" : "image/svg+xml",
135 135 "png" : "image/png",
136 136 "jpeg" : "image/jpeg",
137 137 "latex" : "text/latex",
138 138 "json" : "application/json",
139 139 "javascript" : "application/javascript",
140 140 };
141 141
142 142 def to_mime_key(d):
143 143 """convert dict with v3 aliases to plain mime-type keys"""
144 144 for alias, mime in _mime_map.items():
145 145 if alias in d:
146 146 d[mime] = d.pop(alias)
147 147 return d
148 148
149 149 def from_mime_key(d):
150 150 """convert dict with mime-type keys to v3 aliases"""
151 151 for alias, mime in _mime_map.items():
152 152 if mime in d:
153 153 d[alias] = d.pop(mime)
154 154 return d
155 155
156 156 def upgrade_output(output):
157 157 """upgrade a single code cell output from v3 to v4
158 158
159 159 - pyout -> execute_result
160 160 - pyerr -> error
161 161 - output.type -> output.data.mime/type
162 162 - mime-type keys
163 163 - stream.stream -> stream.name
164 164 """
165 165 if output['output_type'] in {'pyout', 'display_data'}:
166 166 output.setdefault('metadata', NotebookNode())
167 167 if output['output_type'] == 'pyout':
168 168 output['output_type'] = 'execute_result'
169 169 output['execution_count'] = output.pop('prompt_number', None)
170 170
171 171 # move output data into data sub-dict
172 172 data = {}
173 173 for key in list(output):
174 174 if key in {'output_type', 'execution_count', 'metadata'}:
175 175 continue
176 176 data[key] = output.pop(key)
177 177 to_mime_key(data)
178 178 output['data'] = data
179 179 to_mime_key(output.metadata)
180 180 if 'application/json' in data:
181 181 data['application/json'] = json.loads(data['application/json'])
182 182 # promote ascii bytes (from v2) to unicode
183 183 for key in ('image/png', 'image/jpeg'):
184 184 if key in data and isinstance(data[key], bytes):
185 185 data[key] = data[key].decode('ascii')
186 186 elif output['output_type'] == 'pyerr':
187 187 output['output_type'] = 'error'
188 188 elif output['output_type'] == 'stream':
189 189 output['name'] = output.pop('stream', 'stdout')
190 190 return output
191 191
192 192 def downgrade_output(output):
193 193 """downgrade a single code cell output to v3 from v4
194 194
195 195 - pyout <- execute_result
196 196 - pyerr <- error
197 197 - output.data.mime/type -> output.type
198 198 - un-mime-type keys
199 199 - stream.stream <- stream.name
200 200 """
201 201 if output['output_type'] in {'execute_result', 'display_data'}:
202 202 if output['output_type'] == 'execute_result':
203 203 output['output_type'] = 'pyout'
204 204 output['prompt_number'] = output.pop('execution_count', None)
205 205
206 206 # promote data dict to top-level output namespace
207 207 data = output.pop('data', {})
208 208 if 'application/json' in data:
209 209 data['application/json'] = json.dumps(data['application/json'])
210 210 from_mime_key(data)
211 211 output.update(data)
212 212 from_mime_key(output.get('metadata', {}))
213 213 elif output['output_type'] == 'error':
214 214 output['output_type'] = 'pyerr'
215 215 elif output['output_type'] == 'stream':
216 216 output['stream'] = output.pop('name')
217 217 return output
218 218
219 219 def upgrade_outputs(outputs):
220 220 """upgrade outputs of a code cell from v3 to v4"""
221 221 return [upgrade_output(op) for op in outputs]
222 222
223 223 def downgrade_outputs(outputs):
224 224 """downgrade outputs of a code cell to v3 from v4"""
225 225 return [downgrade_output(op) for op in outputs]
226 226
227 227 def downgrade(nb):
228 228 """Convert a v4 notebook to v3.
229 229
230 230 Parameters
231 231 ----------
232 232 nb : NotebookNode
233 233 The Python representation of the notebook to convert.
234 234 """
235 235 if nb.nbformat != nbformat:
236 236 return nb
237 237
238 238 # Validate the notebook before conversion
239 239 _warn_if_invalid(nb, nbformat)
240 240
241 241 nb.nbformat = v3.nbformat
242 242 nb.nbformat_minor = v3.nbformat_minor
243 243 cells = [ downgrade_cell(cell) for cell in nb.pop('cells') ]
244 244 nb.worksheets = [v3.new_worksheet(cells=cells)]
245 245 nb.metadata.setdefault('name', '')
246 246
247 247 # Validate the converted notebook before returning it
248 248 _warn_if_invalid(nb, v3.nbformat)
249 249
250 250 nb.orig_nbformat = nb.metadata.pop('orig_nbformat', nbformat)
251 251 nb.orig_nbformat_minor = nb.metadata.pop('orig_nbformat_minor', nbformat_minor)
252 252
253 253 return nb
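The mime-key mapping in this file is a plain dict transform, so it can be sketched in isolation; the sample v3 output data below is made up:

    # Illustrative only; the mapping and helper are copied from the diff above.
    _mime_map = {
        "text": "text/plain", "html": "text/html", "svg": "image/svg+xml",
        "png": "image/png", "jpeg": "image/jpeg", "latex": "text/latex",
        "json": "application/json", "javascript": "application/javascript",
    }

    def to_mime_key(d):
        """convert dict with v3 aliases to plain mime-type keys"""
        for alias, mime in _mime_map.items():
            if alias in d:
                d[mime] = d.pop(alias)
        return d

    v3_output_data = {"text": "2+2 = 4", "png": "iVBORw0KGgo="}
    print(to_mime_key(v3_output_data))
    # {'text/plain': '2+2 = 4', 'image/png': 'iVBORw0KGgo='}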
@@ -1,755 +1,755 @@
1 1 # encoding: utf-8
2 2 """
3 3 This module defines the things that are used in setup.py for building IPython
4 4
5 5 This includes:
6 6
7 7 * The basic arguments to setup
8 8 * Functions for finding things like packages, package data, etc.
9 9 * A function for checking dependencies.
10 10 """
11 11
12 12 # Copyright (c) IPython Development Team.
13 13 # Distributed under the terms of the Modified BSD License.
14 14
15 15 from __future__ import print_function
16 16
17 17 import errno
18 18 import os
19 19 import sys
20 20
21 21 from distutils import log
22 22 from distutils.command.build_py import build_py
23 23 from distutils.command.build_scripts import build_scripts
24 24 from distutils.command.install import install
25 25 from distutils.command.install_scripts import install_scripts
26 26 from distutils.cmd import Command
27 27 from distutils.errors import DistutilsExecError
28 28 from fnmatch import fnmatch
29 29 from glob import glob
30 30 from subprocess import Popen, PIPE
31 31
32 32 from setupext import install_data_ext
33 33
34 34 #-------------------------------------------------------------------------------
35 35 # Useful globals and utility functions
36 36 #-------------------------------------------------------------------------------
37 37
38 38 # A few handy globals
39 39 isfile = os.path.isfile
40 40 pjoin = os.path.join
41 41 repo_root = os.path.dirname(os.path.abspath(__file__))
42 42
43 43 def oscmd(s):
44 44 print(">", s)
45 45 os.system(s)
46 46
47 47 # Py3 compatibility hacks, without assuming IPython itself is installed with
48 48 # the full py3compat machinery.
49 49
50 50 try:
51 51 execfile
52 52 except NameError:
53 53 def execfile(fname, globs, locs=None):
54 54 locs = locs or globs
55 55 exec(compile(open(fname).read(), fname, "exec"), globs, locs)
56 56
57 57 # A little utility we'll need below, since glob() does NOT allow you to do
58 58 # exclusion on multiple endings!
59 59 def file_doesnt_endwith(test,endings):
60 60 """Return true if test is a file and its name does NOT end with any
61 61 of the strings listed in endings."""
62 62 if not isfile(test):
63 63 return False
64 64 for e in endings:
65 65 if test.endswith(e):
66 66 return False
67 67 return True
68 68
69 69 #---------------------------------------------------------------------------
70 70 # Basic project information
71 71 #---------------------------------------------------------------------------
72 72
73 73 # release.py contains version, authors, license, url, keywords, etc.
74 74 execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())
75 75
76 76 # Create a dict with the basic information
77 77 # This dict is eventually passed to setup after additional keys are added.
78 78 setup_args = dict(
79 79 name = name,
80 80 version = version,
81 81 description = description,
82 82 long_description = long_description,
83 83 author = author,
84 84 author_email = author_email,
85 85 url = url,
86 86 download_url = download_url,
87 87 license = license,
88 88 platforms = platforms,
89 89 keywords = keywords,
90 90 classifiers = classifiers,
91 91 cmdclass = {'install_data': install_data_ext},
92 92 )
93 93
94 94
95 95 #---------------------------------------------------------------------------
96 96 # Find packages
97 97 #---------------------------------------------------------------------------
98 98
99 99 def find_packages():
100 100 """
101 101 Find all of IPython's packages.
102 102 """
103 103 excludes = ['deathrow', 'quarantine']
104 104 packages = []
105 105 for dir,subdirs,files in os.walk('IPython'):
106 106 package = dir.replace(os.path.sep, '.')
107 107 if any(package.startswith('IPython.'+exc) for exc in excludes):
108 108 # package is to be excluded (e.g. deathrow)
109 109 continue
110 110 if '__init__.py' not in files:
111 111 # not a package
112 112 continue
113 113 packages.append(package)
114 114 return packages
115 115
116 116 #---------------------------------------------------------------------------
117 117 # Find package data
118 118 #---------------------------------------------------------------------------
119 119
120 120 def find_package_data():
121 121 """
122 122 Find IPython's package_data.
123 123 """
124 124 # This is not enough for these things to appear in an sdist.
125 125 # We need to muck with the MANIFEST to get this to work
126 126
127 127 # exclude components and less from the walk;
128 128 # we will build the components separately
129 129 excludes = [
130 130 pjoin('static', 'components'),
131 131 pjoin('static', '*', 'less'),
132 132 ]
133 133
134 134 # walk notebook resources:
135 135 cwd = os.getcwd()
136 136 os.chdir(os.path.join('IPython', 'html'))
137 137 static_data = []
138 138 for parent, dirs, files in os.walk('static'):
139 139 if any(fnmatch(parent, pat) for pat in excludes):
140 140 # prevent descending into subdirs
141 141 dirs[:] = []
142 142 continue
143 143 for f in files:
144 144 static_data.append(pjoin(parent, f))
145 145
146 146 components = pjoin("static", "components")
147 147 # select the components we actually need to install
148 148 # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
149 149 static_data.extend([
150 150 pjoin(components, "backbone", "backbone-min.js"),
151 151 pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
152 152 pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
153 153 pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
154 154 pjoin(components, "es6-promise", "*.js"),
155 155 pjoin(components, "font-awesome", "fonts", "*.*"),
156 156 pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
157 157 pjoin(components, "jquery", "jquery.min.js"),
158 158 pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
159 159 pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
160 160 pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
161 161 pjoin(components, "marked", "lib", "marked.js"),
162 162 pjoin(components, "requirejs", "require.js"),
163 163 pjoin(components, "underscore", "underscore-min.js"),
164 164 pjoin(components, "moment", "moment.js"),
165 165 pjoin(components, "moment", "min", "moment.min.js"),
166 166 pjoin(components, "term.js", "src", "term.js"),
167 167 pjoin(components, "text-encoding", "lib", "encoding.js"),
168 168 ])
169 169
170 170 # Ship all of Codemirror's CSS and JS
171 171 for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
172 172 for f in files:
173 173 if f.endswith(('.js', '.css')):
174 174 static_data.append(pjoin(parent, f))
175 175
176 176 os.chdir(os.path.join('tests',))
177 177 js_tests = glob('*.js') + glob('*/*.js')
178 178
179 179 os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
180 180 nbconvert_templates = [os.path.join(dirpath, '*.*')
181 181 for dirpath, _, _ in os.walk('templates')]
182 182
183 183 os.chdir(cwd)
184 184
185 185 package_data = {
186 186 'IPython.core' : ['profile/README*'],
187 187 'IPython.core.tests' : ['*.png', '*.jpg'],
188 188 'IPython.lib.tests' : ['*.wav'],
189 189 'IPython.testing.plugin' : ['*.txt'],
190 190 'IPython.html' : ['templates/*'] + static_data,
191 191 'IPython.html.tests' : js_tests,
192 192 'IPython.nbconvert' : nbconvert_templates +
193 193 [
194 194 'tests/files/*.*',
195 195 'exporters/tests/files/*.*',
196 196 'preprocessors/tests/files/*.*',
197 197 ],
198 198 'IPython.nbconvert.filters' : ['marked.js'],
199 'IPython.nbformat' : [
200 'tests/*.ipynb',
201 'v3/nbformat.v3.schema.json',
202 'v4/nbformat.v4.schema.json',
203 ],
199 # 'IPython.nbformat' : [
200 # 'tests/*.ipynb',
201 # 'v3/nbformat.v3.schema.json',
202 # 'v4/nbformat.v4.schema.json',
203 # ],
204 204 'IPython.kernel': ['resources/*.*'],
205 205 }
206 206
207 207 return package_data
208 208
209 209
210 210 def check_package_data(package_data):
211 211 """verify that package_data globs make sense"""
212 212 print("checking package data")
213 213 for pkg, data in package_data.items():
214 214 pkg_root = pjoin(*pkg.split('.'))
215 215 for d in data:
216 216 path = pjoin(pkg_root, d)
217 217 if '*' in path:
218 218 assert len(glob(path)) > 0, "No files match pattern %s" % path
219 219 else:
220 220 assert os.path.exists(path), "Missing package data: %s" % path
221 221
222 222
223 223 def check_package_data_first(command):
224 224 """decorator for checking package_data before running a given command
225 225
226 226 Probably only needs to wrap build_py
227 227 """
228 228 class DecoratedCommand(command):
229 229 def run(self):
230 230 check_package_data(self.package_data)
231 231 command.run(self)
232 232 return DecoratedCommand
233 233
234 234
235 235 #---------------------------------------------------------------------------
236 236 # Find data files
237 237 #---------------------------------------------------------------------------
238 238
239 239 def make_dir_struct(tag,base,out_base):
240 240 """Make the directory structure of all files below a starting dir.
241 241
242 242 This is just a convenience routine to help build a nested directory
243 243 hierarchy because distutils is too stupid to do this by itself.
244 244
245 245 XXX - this needs a proper docstring!
246 246 """
247 247
248 248 # we'll use these a lot below
249 249 lbase = len(base)
250 250 pathsep = os.path.sep
251 251 lpathsep = len(pathsep)
252 252
253 253 out = []
254 254 for (dirpath,dirnames,filenames) in os.walk(base):
255 255 # we need to strip out the dirpath from the base to map it to the
256 256 # output (installation) path. This requires possibly stripping the
257 257 # path separator, because otherwise pjoin will not work correctly
258 258 # (pjoin('foo/','/bar') returns '/bar').
259 259
260 260 dp_eff = dirpath[lbase:]
261 261 if dp_eff.startswith(pathsep):
262 262 dp_eff = dp_eff[lpathsep:]
263 263 # The output path must be anchored at the out_base marker
264 264 out_path = pjoin(out_base,dp_eff)
265 265 # Now we can generate the final filenames. Since os.walk only produces
266 266 # filenames, we must join back with the dirpath to get full valid file
267 267 # paths:
268 268 pfiles = [pjoin(dirpath,f) for f in filenames]
269 269 # Finally, generate the entry we need, which is a pari of (output
270 270 # path, files) for use as a data_files parameter in install_data.
271 271 out.append((out_path, pfiles))
272 272
273 273 return out
274 274
275 275
276 276 def find_data_files():
277 277 """
278 278 Find IPython's data_files.
279 279
280 280 Just man pages at this point.
281 281 """
282 282
283 283 manpagebase = pjoin('share', 'man', 'man1')
284 284
285 285 # Simple file lists can be made by hand
286 286 manpages = [f for f in glob(pjoin('docs','man','*.1.gz')) if isfile(f)]
287 287 if not manpages:
288 288 # When running from a source tree, the manpages aren't gzipped
289 289 manpages = [f for f in glob(pjoin('docs','man','*.1')) if isfile(f)]
290 290
291 291 # And assemble the entire output list
292 292 data_files = [ (manpagebase, manpages) ]
293 293
294 294 return data_files
295 295
296 296
297 297 def make_man_update_target(manpage):
298 298 """Return a target_update-compliant tuple for the given manpage.
299 299
300 300 Parameters
301 301 ----------
302 302 manpage : string
303 303 Name of the manpage, must include the section number (trailing number).
304 304
305 305 Example
306 306 -------
307 307
308 308 >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
309 309 ('docs/man/ipython.1.gz',
310 310 ['docs/man/ipython.1'],
311 311 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
312 312 """
313 313 man_dir = pjoin('docs', 'man')
314 314 manpage_gz = manpage + '.gz'
315 315 manpath = pjoin(man_dir, manpage)
316 316 manpath_gz = pjoin(man_dir, manpage_gz)
317 317 gz_cmd = ( "cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s" %
318 318 locals() )
319 319 return (manpath_gz, [manpath], gz_cmd)
320 320
321 321 # The two functions below are copied from IPython.utils.path, so we don't need
322 322 # to import IPython during setup, which fails on Python 3.
323 323
324 324 def target_outdated(target,deps):
325 325 """Determine whether a target is out of date.
326 326
327 327 target_outdated(target,deps) -> 1/0
328 328
329 329 deps: list of filenames which MUST exist.
330 330 target: single filename which may or may not exist.
331 331
332 332 If target doesn't exist or is older than any file listed in deps, return
333 333 true, otherwise return false.
334 334 """
335 335 try:
336 336 target_time = os.path.getmtime(target)
337 337 except os.error:
338 338 return 1
339 339 for dep in deps:
340 340 dep_time = os.path.getmtime(dep)
341 341 if dep_time > target_time:
342 342 #print "For target",target,"Dep failed:",dep # dbg
343 343 #print "times (dep,tar):",dep_time,target_time # dbg
344 344 return 1
345 345 return 0
346 346
347 347
348 348 def target_update(target,deps,cmd):
349 349 """Update a target with a given command given a list of dependencies.
350 350
351 351 target_update(target,deps,cmd) -> runs cmd if target is outdated.
352 352
353 353 This is just a wrapper around target_outdated() which calls the given
354 354 command if target is outdated."""
355 355
356 356 if target_outdated(target,deps):
357 357 os.system(cmd)
358 358
359 359 #---------------------------------------------------------------------------
360 360 # Find scripts
361 361 #---------------------------------------------------------------------------
362 362
363 363 def find_entry_points():
364 364 """Defines the command line entry points for IPython
365 365
366 366 This always uses setuptools-style entry points. When setuptools is not in
367 367 use, our own build_scripts_entrypt class below parses these and builds
368 368 command line scripts.
369 369
370 370 Each of our entry points gets both a plain name, e.g. ipython, and one
371 371 suffixed with the Python major version number, e.g. ipython3.
372 372 """
373 373 ep = [
374 374 'ipython%s = IPython:start_ipython',
375 375 'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
376 376 'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
377 377 'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
378 378 'iptest%s = IPython.testing.iptestcontroller:main',
379 379 ]
380 380 suffix = str(sys.version_info[0])
381 381 return [e % '' for e in ep] + [e % suffix for e in ep]
382 382
383 383 script_src = """#!{executable}
384 384 # This script was automatically generated by setup.py
385 385 if __name__ == '__main__':
386 386 from {mod} import {func}
387 387 {func}()
388 388 """
389 389
390 390 class build_scripts_entrypt(build_scripts):
391 391 """Build the command line scripts
392 392
393 393 Parse setuptools style entry points and write simple scripts to run the
394 394 target functions.
395 395
396 396 On Windows, this also creates .cmd wrappers for the scripts so that you can
397 397 easily launch them from a command line.
398 398 """
399 399 def run(self):
400 400 self.mkpath(self.build_dir)
401 401 outfiles = []
402 402 for script in find_entry_points():
403 403 name, entrypt = script.split('=')
404 404 name = name.strip()
405 405 entrypt = entrypt.strip()
406 406 outfile = os.path.join(self.build_dir, name)
407 407 outfiles.append(outfile)
408 408 print('Writing script to', outfile)
409 409
410 410 mod, func = entrypt.split(':')
411 411 with open(outfile, 'w') as f:
412 412 f.write(script_src.format(executable=sys.executable,
413 413 mod=mod, func=func))
414 414
415 415 if sys.platform == 'win32':
416 416 # Write .cmd wrappers for Windows so 'ipython' etc. work at the
417 417 # command line
418 418 cmd_file = os.path.join(self.build_dir, name + '.cmd')
419 419 cmd = '@"{python}" "%~dp0\{script}" %*\r\n'.format(
420 420 python=sys.executable, script=name)
421 421 log.info("Writing %s wrapper script" % cmd_file)
422 422 with open(cmd_file, 'w') as f:
423 423 f.write(cmd)
424 424
425 425 return outfiles, outfiles
426 426
427 427 class install_lib_symlink(Command):
428 428 user_options = [
429 429 ('install-dir=', 'd', "directory to install to"),
430 430 ]
431 431
432 432 def initialize_options(self):
433 433 self.install_dir = None
434 434
435 435 def finalize_options(self):
436 436 self.set_undefined_options('symlink',
437 437 ('install_lib', 'install_dir'),
438 438 )
439 439
440 440 def run(self):
441 441 if sys.platform == 'win32':
442 442 raise Exception("This doesn't work on Windows.")
443 443 pkg = os.path.join(os.getcwd(), 'IPython')
444 444 dest = os.path.join(self.install_dir, 'IPython')
445 445 if os.path.islink(dest):
446 446 print('removing existing symlink at %s' % dest)
447 447 os.unlink(dest)
448 448 print('symlinking %s -> %s' % (pkg, dest))
449 449 os.symlink(pkg, dest)
450 450
451 451 class unsymlink(install):
452 452 def run(self):
453 453 dest = os.path.join(self.install_lib, 'IPython')
454 454 if os.path.islink(dest):
455 455 print('removing symlink at %s' % dest)
456 456 os.unlink(dest)
457 457 else:
458 458 print('No symlink exists at %s' % dest)
459 459
460 460 class install_symlinked(install):
461 461 def run(self):
462 462 if sys.platform == 'win32':
463 463 raise Exception("This doesn't work on Windows.")
464 464
465 465 # Run all sub-commands (at least those that need to be run)
466 466 for cmd_name in self.get_sub_commands():
467 467 self.run_command(cmd_name)
468 468
469 469 # 'sub_commands': a list of commands this command might have to run to
470 470 # get its work done. See cmd.py for more info.
471 471 sub_commands = [('install_lib_symlink', lambda self:True),
472 472 ('install_scripts_sym', lambda self:True),
473 473 ]
474 474
475 475 class install_scripts_for_symlink(install_scripts):
476 476 """Redefined to get options from 'symlink' instead of 'install'.
477 477
478 478 I love distutils almost as much as I love setuptools.
479 479 """
480 480 def finalize_options(self):
481 481 self.set_undefined_options('build', ('build_scripts', 'build_dir'))
482 482 self.set_undefined_options('symlink',
483 483 ('install_scripts', 'install_dir'),
484 484 ('force', 'force'),
485 485 ('skip_build', 'skip_build'),
486 486 )
487 487
488 488 #---------------------------------------------------------------------------
489 489 # Verify all dependencies
490 490 #---------------------------------------------------------------------------
491 491
492 492 def check_for_readline():
493 493 """Check for GNU readline"""
494 494 try:
495 495 import gnureadline as readline
496 496 except ImportError:
497 497 pass
498 498 else:
499 499 return True
500 500 try:
501 501 import readline
502 502 except ImportError:
503 503 return False
504 504 else:
505 505 if sys.platform == 'darwin' and 'libedit' in readline.__doc__:
506 506 print("Ignoring readline linked to libedit", file=sys.stderr)
507 507 return False
508 508 return True
509 509
510 510 #---------------------------------------------------------------------------
511 511 # VCS related
512 512 #---------------------------------------------------------------------------
513 513
514 514 # utils.submodule has checks for submodule status
515 515 execfile(pjoin('IPython','utils','submodule.py'), globals())
516 516
517 517 class UpdateSubmodules(Command):
518 518 """Update git submodules
519 519
520 520 IPython's external javascript dependencies live in a separate repo.
521 521 """
522 522 description = "Update git submodules"
523 523 user_options = []
524 524
525 525 def initialize_options(self):
526 526 pass
527 527
528 528 def finalize_options(self):
529 529 pass
530 530
531 531 def run(self):
532 532 failure = False
533 533 try:
534 534 self.spawn('git submodule init'.split())
535 535 self.spawn('git submodule update --recursive'.split())
536 536 except Exception as e:
537 537 failure = e
538 538 print(e)
539 539
540 540 if not check_submodule_status(repo_root) == 'clean':
541 541 print("submodules could not be checked out")
542 542 sys.exit(1)
543 543
544 544
545 545 def git_prebuild(pkg_dir, build_cmd=build_py):
546 546 """Return extended build or sdist command class for recording commit
547 547
548 548 records git commit in IPython.utils._sysinfo.commit
549 549
550 550 for use in IPython.utils.sysinfo.sys_info() calls after installation.
551 551
552 552 Also ensures that submodules exist prior to running
553 553 """
554 554
555 555 class MyBuildPy(build_cmd):
556 556 ''' Subclass to write commit data into installation tree '''
557 557 def run(self):
558 558 build_cmd.run(self)
559 559 # this one will only fire for build commands
560 560 if hasattr(self, 'build_lib'):
561 561 self._record_commit(self.build_lib)
562 562
563 563 def make_release_tree(self, base_dir, files):
564 564 # this one will fire for sdist
565 565 build_cmd.make_release_tree(self, base_dir, files)
566 566 self._record_commit(base_dir)
567 567
568 568 def _record_commit(self, base_dir):
569 569 import subprocess
570 570 proc = subprocess.Popen('git rev-parse --short HEAD',
571 571 stdout=subprocess.PIPE,
572 572 stderr=subprocess.PIPE,
573 573 shell=True)
574 574 repo_commit, _ = proc.communicate()
575 575 repo_commit = repo_commit.strip().decode("ascii")
576 576
577 577 out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
578 578 if os.path.isfile(out_pth) and not repo_commit:
579 579 # nothing to write, don't clobber
580 580 return
581 581
582 582 print("writing git commit '%s' to %s" % (repo_commit, out_pth))
583 583
584 584 # remove to avoid overwriting original via hard link
585 585 try:
586 586 os.remove(out_pth)
587 587 except (IOError, OSError):
588 588 pass
589 589 with open(out_pth, 'w') as out_file:
590 590 out_file.writelines([
591 591 '# GENERATED BY setup.py\n',
592 592 'commit = u"%s"\n' % repo_commit,
593 593 ])
594 594 return require_submodules(MyBuildPy)
595 595
596 596
597 597 def require_submodules(command):
598 598 """decorator for instructing a command to check for submodules before running"""
599 599 class DecoratedCommand(command):
600 600 def run(self):
601 601 if not check_submodule_status(repo_root) == 'clean':
602 602 print("submodules missing! Run `setup.py submodule` and try again")
603 603 sys.exit(1)
604 604 command.run(self)
605 605 return DecoratedCommand
606 606
607 607 #---------------------------------------------------------------------------
608 608 # bdist related
609 609 #---------------------------------------------------------------------------
610 610
611 611 def get_bdist_wheel():
612 612 """Construct bdist_wheel command for building wheels
613 613
614 614 Constructs py2-none-any tag, instead of py2.7-none-any
615 615 """
616 616 class RequiresWheel(Command):
617 617 description = "Dummy command for missing bdist_wheel"
618 618 user_options = []
619 619
620 620 def initialize_options(self):
621 621 pass
622 622
623 623 def finalize_options(self):
624 624 pass
625 625
626 626 def run(self):
627 627 print("bdist_wheel requires the wheel package")
628 628 sys.exit(1)
629 629
630 630 if 'setuptools' not in sys.modules:
631 631 return RequiresWheel
632 632 else:
633 633 try:
634 634 from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
635 635 except ImportError:
636 636 return RequiresWheel
637 637
638 638 class bdist_wheel_tag(bdist_wheel):
639 639
640 640 def add_requirements(self, metadata_path):
641 641 """transform platform-dependent requirements"""
642 642 pkg_info = read_pkg_info(metadata_path)
643 643 # pkg_info is an email.Message object (?!)
644 644 # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
645 645 # and transform them to conditionals
646 646 requires = pkg_info.get_all('Requires-Dist')
647 647 del pkg_info['Requires-Dist']
648 648 def _remove_startswith(lis, prefix):
649 649 """like list.remove, but with startswith instead of =="""
650 650 found = False
651 651 for idx, item in enumerate(lis):
652 652 if item.startswith(prefix):
653 653 found = True
654 654 break
655 655 if found:
656 656 lis.pop(idx)
657 657
658 658 for pkg in ("gnureadline", "pyreadline", "mock", "terminado", "appnope", "pexpect"):
659 659 _remove_startswith(requires, pkg)
660 660 requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
661 661 requires.append("terminado (>=0.3.3); extra == 'notebook' and sys.platform != 'win32'")
662 662 requires.append("terminado (>=0.3.3); extra == 'all' and sys.platform != 'win32'")
663 663 requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
664 664 requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
665 665 requires.append("mock; extra == 'test' and python_version < '3.3'")
666 666 requires.append("appnope; sys.platform == 'darwin'")
667 667 requires.append("pexpect; sys.platform != 'win32'")
668 668 for r in requires:
669 669 pkg_info['Requires-Dist'] = r
670 670 write_pkg_info(metadata_path, pkg_info)
671 671
672 672 return bdist_wheel_tag
673 673
674 674 #---------------------------------------------------------------------------
675 675 # Notebook related
676 676 #---------------------------------------------------------------------------
677 677
678 678 class CompileCSS(Command):
679 679 """Recompile Notebook CSS
680 680
681 681 Regenerate the compiled CSS from LESS sources.
682 682
683 683 Requires various dev dependencies, such as invoke and lessc.
684 684 """
685 685 description = "Recompile Notebook CSS"
686 686 user_options = [
687 687 ('minify', 'x', "minify CSS"),
688 688 ('force', 'f', "force recompilation of CSS"),
689 689 ]
690 690
691 691 def initialize_options(self):
692 692 self.minify = False
693 693 self.force = False
694 694
695 695 def finalize_options(self):
696 696 self.minify = bool(self.minify)
697 697 self.force = bool(self.force)
698 698
699 699 def run(self):
700 700 cmd = ['invoke', 'css']
701 701 if self.minify:
702 702 cmd.append('--minify')
703 703 if self.force:
704 704 cmd.append('--force')
705 705 try:
706 706 p = Popen(cmd, cwd=pjoin(repo_root, "IPython", "html"), stderr=PIPE)
707 707 except OSError:
708 708 raise DistutilsExecError("invoke is required to rebuild css (pip install invoke)")
709 709 out, err = p.communicate()
710 710 if p.returncode:
711 711 if sys.version_info[0] >= 3:
712 712 err = err.decode('utf8', 'replace')
713 713 raise DistutilsExecError(err.strip())
714 714
715 715
716 716 class JavascriptVersion(Command):
717 717 """write the javascript version to notebook javascript"""
718 718 description = "Write IPython version to javascript"
719 719 user_options = []
720 720
721 721 def initialize_options(self):
722 722 pass
723 723
724 724 def finalize_options(self):
725 725 pass
726 726
727 727 def run(self):
728 728 nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js")
729 729 with open(nsfile) as f:
730 730 lines = f.readlines()
731 731 with open(nsfile, 'w') as f:
732 732 found = False
733 733 for line in lines:
734 734 if line.strip().startswith("IPython.version"):
735 735 line = ' IPython.version = "{0}";\n'.format(version)
736 736 found = True
737 737 f.write(line)
738 738 if not found:
739 739 raise RuntimeError("Didn't find IPython.version line in %s" % nsfile)
740 740
741 741
742 742 def css_js_prerelease(command):
743 743 """decorator for building js/minified css prior to a release"""
744 744 class DecoratedCommand(command):
745 745 def run(self):
746 746 self.distribution.run_command('jsversion')
747 747 css = self.distribution.get_command_obj('css')
748 748 css.minify = True
749 749 try:
750 750 self.distribution.run_command('css')
751 751 except Exception as e:
752 752 log.warn("rebuilding css and sourcemaps failed (not a problem)")
753 753 log.warn(str(e))
754 754 command.run(self)
755 755 return DecoratedCommand
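To make the entry-point scheme in `find_entry_points` concrete, a short sketch of the plain and version-suffixed names it generates on a Python 3 interpreter (two of the five entries shown; output layout is illustrative):

    # Illustrative only; the template strings are taken from the list in the diff above.
    ep = [
        'ipython%s = IPython:start_ipython',
        'iptest%s = IPython.testing.iptestcontroller:main',
    ]
    suffix = '3'   # str(sys.version_info[0]) on Python 3
    print([e % '' for e in ep] + [e % suffix for e in ep])
    # ['ipython = IPython:start_ipython',
    #  'iptest = IPython.testing.iptestcontroller:main',
    #  'ipython3 = IPython:start_ipython',
    #  'iptest3 = IPython.testing.iptestcontroller:main']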