Option to spew subprocess streams during tests...
Thomas Kluyver
@@ -1,175 +1,174 b''
1 1 """utilities for testing IPython kernels"""
2 2
3 3 #-------------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-------------------------------------------------------------------------------
9 9
10 10 #-------------------------------------------------------------------------------
11 11 # Imports
12 12 #-------------------------------------------------------------------------------
13 13
14 14 import atexit
15 15
16 16 from contextlib import contextmanager
17 17 from subprocess import PIPE, STDOUT
18 18 try:
19 19 from queue import Empty # Py 3
20 20 except ImportError:
21 21 from Queue import Empty # Py 2
22 22
23 23 import nose
24 24 import nose.tools as nt
25 25
26 26 from IPython.kernel import KernelManager
27 27
28 28 #-------------------------------------------------------------------------------
29 29 # Globals
30 30 #-------------------------------------------------------------------------------
31 31
32 32 STARTUP_TIMEOUT = 60
33 33 TIMEOUT = 15
34 34
35 35 KM = None
36 36 KC = None
37 37
38 38 #-------------------------------------------------------------------------------
39 39 # code
40 40 #-------------------------------------------------------------------------------
41 41
42 42
43 43 def start_new_kernel(argv=None):
44 44 """start a new kernel, and return its Manager and Client"""
45 45 km = KernelManager()
46 kwargs = dict(stdout=nose.ipy_stream_capturer.writefd, stderr=STDOUT)
46 kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT)
47 47 if argv:
48 48 kwargs['extra_arguments'] = argv
49 49 km.start_kernel(**kwargs)
50 nose.ipy_stream_capturer.ensure_started()
51 50 kc = km.client()
52 51 kc.start_channels()
53 52
54 53 msg_id = kc.kernel_info()
55 54 kc.get_shell_msg(block=True, timeout=STARTUP_TIMEOUT)
56 55 flush_channels(kc)
57 56 return km, kc
58 57
59 58 def flush_channels(kc=None):
60 59 """flush any messages waiting on the queue"""
61 60 from .test_message_spec import validate_message
62 61
63 62 if kc is None:
64 63 kc = KC
65 64 for channel in (kc.shell_channel, kc.iopub_channel):
66 65 while True:
67 66 try:
68 67 msg = channel.get_msg(block=True, timeout=0.1)
69 68 except Empty:
70 69 break
71 70 else:
72 71 validate_message(msg)
73 72
74 73
75 74 def execute(code='', kc=None, **kwargs):
76 75 """wrapper for doing common steps for validating an execution request"""
77 76 from .test_message_spec import validate_message
78 77 if kc is None:
79 78 kc = KC
80 79 msg_id = kc.execute(code=code, **kwargs)
81 80 reply = kc.get_shell_msg(timeout=TIMEOUT)
82 81 validate_message(reply, 'execute_reply', msg_id)
83 82 busy = kc.get_iopub_msg(timeout=TIMEOUT)
84 83 validate_message(busy, 'status', msg_id)
85 84 nt.assert_equal(busy['content']['execution_state'], 'busy')
86 85
87 86 if not kwargs.get('silent'):
88 87 pyin = kc.get_iopub_msg(timeout=TIMEOUT)
89 88 validate_message(pyin, 'pyin', msg_id)
90 89 nt.assert_equal(pyin['content']['code'], code)
91 90
92 91 return msg_id, reply['content']
93 92
94 93 def start_global_kernel():
95 94 """start the global kernel (if it isn't running) and return its client"""
96 95 global KM, KC
97 96 if KM is None:
98 97 KM, KC = start_new_kernel()
99 98 atexit.register(stop_global_kernel)
100 99 return KC
101 100
102 101 @contextmanager
103 102 def kernel():
104 103 """Context manager for the global kernel instance
105 104
106 105 Should be used for most kernel tests
107 106
108 107 Returns
109 108 -------
110 109 kernel_client: connected KernelClient instance
111 110 """
112 111 yield start_global_kernel()
113 112
114 113 def uses_kernel(test_f):
115 114 """Decorator for tests that use the global kernel"""
116 115 def wrapped_test():
117 116 with kernel() as kc:
118 117 test_f(kc)
119 118 wrapped_test.__doc__ = test_f.__doc__
120 119 wrapped_test.__name__ = test_f.__name__
121 120 return wrapped_test
122 121
123 122 def stop_global_kernel():
124 123 """Stop the global shared kernel instance, if it exists"""
125 124 global KM, KC
126 125 KC.stop_channels()
127 126 KC = None
128 127 if KM is None:
129 128 return
130 129 KM.shutdown_kernel(now=True)
131 130 KM = None
132 131
133 132 @contextmanager
134 133 def new_kernel(argv=None):
135 134 """Context manager for a new kernel in a subprocess
136 135
137 136 Should only be used for tests where the kernel must not be re-used.
138 137
139 138 Returns
140 139 -------
141 140 kernel_client: connected KernelClient instance
142 141 """
143 142 km, kc = start_new_kernel(argv)
144 143 try:
145 144 yield kc
146 145 finally:
147 146 kc.stop_channels()
148 147 km.shutdown_kernel(now=True)
149 148
150 149
151 150 def assemble_output(iopub):
152 151 """assemble stdout/err from an execution"""
153 152 stdout = ''
154 153 stderr = ''
155 154 while True:
156 155 msg = iopub.get_msg(block=True, timeout=1)
157 156 msg_type = msg['msg_type']
158 157 content = msg['content']
159 158 if msg_type == 'status' and content['execution_state'] == 'idle':
160 159 # idle message signals end of output
161 160 break
162 161 elif msg['msg_type'] == 'stream':
163 162 if content['name'] == 'stdout':
164 163 stdout += content['data']
165 164 elif content['name'] == 'stderr':
166 165 stderr += content['data']
167 166 else:
168 167 raise KeyError("bad stream: %r" % content['name'])
169 168 else:
170 169 # other output, ignored
171 170 pass
172 171 return stdout, stderr
173 172
174 173
175 174
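The hunk above (the kernel test utilities) replaces the direct reference to nose.ipy_stream_capturer.writefd with a call to nose.iptest_stdstreams_fileno(), so the kernel subprocess inherits whichever file descriptor the iptest plugin hands out, and the explicit ensure_started() call can go away. A minimal sketch of how other test code might use the same hook, with a fallback for running outside iptest; the helper names here are illustrative, not part of the commit:

    import sys
    from subprocess import Popen, STDOUT

    import nose

    def _subproc_stdout_fileno():
        # iptest's SubprocessStreamCapturePlugin attaches this callable to the
        # nose module; fall back to the real stdout when it isn't present.
        fileno = getattr(nose, 'iptest_stdstreams_fileno', None)
        return fileno() if fileno is not None else sys.__stdout__.fileno()

    def launch_with_redirected_output(cmd):
        """Start a subprocess whose output goes wherever iptest directs it."""
        return Popen(cmd, stdout=_subproc_stdout_fileno(), stderr=STDOUT)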
@@ -1,131 +1,130 b''
1 1 """toplevel setup/teardown for parallel tests."""
2 2 from __future__ import print_function
3 3
4 4 #-------------------------------------------------------------------------------
5 5 # Copyright (C) 2011 The IPython Development Team
6 6 #
7 7 # Distributed under the terms of the BSD License. The full license is in
8 8 # the file COPYING, distributed as part of this software.
9 9 #-------------------------------------------------------------------------------
10 10
11 11 #-------------------------------------------------------------------------------
12 12 # Imports
13 13 #-------------------------------------------------------------------------------
14 14
15 15 import os
16 16 import tempfile
17 17 import time
18 18 from subprocess import Popen, PIPE, STDOUT
19 19
20 20 import nose
21 21
22 22 from IPython.utils.path import get_ipython_dir
23 23 from IPython.parallel import Client
24 24 from IPython.parallel.apps.launcher import (LocalProcessLauncher,
25 25 ipengine_cmd_argv,
26 26 ipcontroller_cmd_argv,
27 27 SIGKILL,
28 28 ProcessStateError,
29 29 )
30 30
31 31 # globals
32 32 launchers = []
33 33 blackhole = open(os.devnull, 'w')
34 34
35 35 # Launcher class
36 36 class TestProcessLauncher(LocalProcessLauncher):
37 37 """subclass LocalProcessLauncher, to prevent extra sockets and threads being created on Windows"""
38 38 def start(self):
39 39 if self.state == 'before':
40 # Store stdout & stderr to show with failing tests.
41 # This is defined in IPython.testing.iptest
40 42 self.process = Popen(self.args,
41 stdout=nose.ipy_stream_capturer.writefd, stderr=STDOUT,
43 stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
42 44 env=os.environ,
43 45 cwd=self.work_dir
44 46 )
45 47 self.notify_start(self.process.pid)
46 48 self.poll = self.process.poll
47 # Store stdout & stderr to show with failing tests.
48 # This is defined in IPython.testing.iptest
49 nose.ipy_stream_capturer.ensure_started()
50 49 else:
51 50 s = 'The process was already started and has state: %r' % self.state
52 51 raise ProcessStateError(s)
53 52
54 53 # nose setup/teardown
55 54
56 55 def setup():
57 56 cluster_dir = os.path.join(get_ipython_dir(), 'profile_iptest')
58 57 engine_json = os.path.join(cluster_dir, 'security', 'ipcontroller-engine.json')
59 58 client_json = os.path.join(cluster_dir, 'security', 'ipcontroller-client.json')
60 59 for json in (engine_json, client_json):
61 60 if os.path.exists(json):
62 61 os.remove(json)
63 62
64 63 cp = TestProcessLauncher()
65 64 cp.cmd_and_args = ipcontroller_cmd_argv + \
66 65 ['--profile=iptest', '--log-level=20', '--ping=250', '--dictdb']
67 66 cp.start()
68 67 launchers.append(cp)
69 68 tic = time.time()
70 69 while not os.path.exists(engine_json) or not os.path.exists(client_json):
71 70 if cp.poll() is not None:
72 71 raise RuntimeError("The test controller exited with status %s" % cp.poll())
73 72 elif time.time()-tic > 15:
74 73 raise RuntimeError("Timeout waiting for the test controller to start.")
75 74 time.sleep(0.1)
76 75 add_engines(1)
77 76
78 77 def add_engines(n=1, profile='iptest', total=False):
79 78 """add a number of engines to a given profile.
80 79
81 80 If total is True, then already running engines are counted, and only
82 81 the additional engines necessary (if any) are started.
83 82 """
84 83 rc = Client(profile=profile)
85 84 base = len(rc)
86 85
87 86 if total:
88 87 n = max(n - base, 0)
89 88
90 89 eps = []
91 90 for i in range(n):
92 91 ep = TestProcessLauncher()
93 92 ep.cmd_and_args = ipengine_cmd_argv + [
94 93 '--profile=%s' % profile,
95 94 '--log-level=50',
96 95 '--InteractiveShell.colors=nocolor'
97 96 ]
98 97 ep.start()
99 98 launchers.append(ep)
100 99 eps.append(ep)
101 100 tic = time.time()
102 101 while len(rc) < base+n:
103 102 if any([ ep.poll() is not None for ep in eps ]):
104 103 raise RuntimeError("A test engine failed to start.")
105 104 elif time.time()-tic > 15:
106 105 raise RuntimeError("Timeout waiting for engines to connect.")
107 106 time.sleep(.1)
108 107 rc.spin()
109 108 rc.close()
110 109 return eps
111 110
112 111 def teardown():
113 112 time.sleep(1)
114 113 while launchers:
115 114 p = launchers.pop()
116 115 if p.poll() is None:
117 116 try:
118 117 p.stop()
119 118 except Exception as e:
120 119 print(e)
121 120 pass
122 121 if p.poll() is None:
123 122 time.sleep(.25)
124 123 if p.poll() is None:
125 124 try:
126 125 print('cleaning up test process...')
127 126 p.signal(SIGKILL)
128 127 except:
129 128 print("couldn't shutdown process: ", p)
130 129 blackhole.close()
131 130
@@ -1,519 +1,530 b''
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Test Suite Runner.
3 3
4 4 This module provides a main entry point to a user script to test IPython
5 5 itself from the command line. There are two ways of running this script:
6 6
7 7 1. With the syntax `iptest all`. This runs our entire test suite by
8 8 calling this script (with different arguments) recursively. This
9 9 causes modules and packages to be tested in different processes, using nose
10 10 or trial where appropriate.
11 11 2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
12 12 the script simply calls nose, but with special command line flags and
13 13 plugins loaded.
14 14
15 15 """
16 16
17 17 #-----------------------------------------------------------------------------
18 18 # Copyright (C) 2009-2011 The IPython Development Team
19 19 #
20 20 # Distributed under the terms of the BSD License. The full license is in
21 21 # the file COPYING, distributed as part of this software.
22 22 #-----------------------------------------------------------------------------
23 23
24 24 #-----------------------------------------------------------------------------
25 25 # Imports
26 26 #-----------------------------------------------------------------------------
27 27 from __future__ import print_function
28 28
29 29 # Stdlib
30 30 import glob
31 31 from io import BytesIO
32 32 import os
33 33 import os.path as path
34 34 import sys
35 35 from threading import Thread, Lock, Event
36 36 import warnings
37 37
38 38 # Now, proceed to import nose itself
39 39 import nose.plugins.builtin
40 40 from nose.plugins.xunit import Xunit
41 41 from nose import SkipTest
42 42 from nose.core import TestProgram
43 43 from nose.plugins import Plugin
44 44 from nose.util import safe_str
45 45
46 46 # Our own imports
47 47 from IPython.utils.process import is_cmd_found
48 48 from IPython.utils.importstring import import_item
49 49 from IPython.testing.plugin.ipdoctest import IPythonDoctest
50 50 from IPython.external.decorators import KnownFailure, knownfailureif
51 51
52 52 pjoin = path.join
53 53
54 54
55 55 #-----------------------------------------------------------------------------
56 56 # Globals
57 57 #-----------------------------------------------------------------------------
58 58
59 59
60 60 #-----------------------------------------------------------------------------
61 61 # Warnings control
62 62 #-----------------------------------------------------------------------------
63 63
64 64 # Twisted generates annoying warnings with Python 2.6, as will do other code
65 65 # that imports 'sets' as of today
66 66 warnings.filterwarnings('ignore', 'the sets module is deprecated',
67 67 DeprecationWarning )
68 68
69 69 # This one also comes from Twisted
70 70 warnings.filterwarnings('ignore', 'the sha module is deprecated',
71 71 DeprecationWarning)
72 72
73 73 # Wx on Fedora11 spits these out
74 74 warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
75 75 UserWarning)
76 76
77 77 # ------------------------------------------------------------------------------
78 78 # Monkeypatch Xunit to count known failures as skipped.
79 79 # ------------------------------------------------------------------------------
80 80 def monkeypatch_xunit():
81 81 try:
82 82 knownfailureif(True)(lambda: None)()
83 83 except Exception as e:
84 84 KnownFailureTest = type(e)
85 85
86 86 def addError(self, test, err, capt=None):
87 87 if issubclass(err[0], KnownFailureTest):
88 88 err = (SkipTest,) + err[1:]
89 89 return self.orig_addError(test, err, capt)
90 90
91 91 Xunit.orig_addError = Xunit.addError
92 92 Xunit.addError = addError
93 93
94 94 #-----------------------------------------------------------------------------
95 95 # Check which dependencies are installed and greater than minimum version.
96 96 #-----------------------------------------------------------------------------
97 97 def extract_version(mod):
98 98 return mod.__version__
99 99
100 100 def test_for(item, min_version=None, callback=extract_version):
101 101 """Test to see if item is importable, and optionally check against a minimum
102 102 version.
103 103
104 104 If min_version is given, the default behavior is to check against the
105 105 `__version__` attribute of the item, but specifying `callback` allows you to
106 106 extract the value you are interested in. e.g::
107 107
108 108 In [1]: import sys
109 109
110 110 In [2]: from IPython.testing.iptest import test_for
111 111
112 112 In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
113 113 Out[3]: True
114 114
115 115 """
116 116 try:
117 117 check = import_item(item)
118 118 except (ImportError, RuntimeError):
119 119 # GTK reports Runtime error if it can't be initialized even if it's
120 120 # importable.
121 121 return False
122 122 else:
123 123 if min_version:
124 124 if callback:
125 125 # extra processing step to get version to compare
126 126 check = callback(check)
127 127
128 128 return check >= min_version
129 129 else:
130 130 return True
131 131
132 132 # Global dict where we can store information on what we have and what we don't
133 133 # have available at test run time
134 134 have = {}
135 135
136 136 have['curses'] = test_for('_curses')
137 137 have['matplotlib'] = test_for('matplotlib')
138 138 have['numpy'] = test_for('numpy')
139 139 have['pexpect'] = test_for('IPython.external.pexpect')
140 140 have['pymongo'] = test_for('pymongo')
141 141 have['pygments'] = test_for('pygments')
142 142 have['qt'] = test_for('IPython.external.qt')
143 143 have['rpy2'] = test_for('rpy2')
144 144 have['sqlite3'] = test_for('sqlite3')
145 145 have['cython'] = test_for('Cython')
146 146 have['oct2py'] = test_for('oct2py')
147 147 have['tornado'] = test_for('tornado.version_info', (3,1,0), callback=None)
148 148 have['jinja2'] = test_for('jinja2')
149 149 have['azure'] = test_for('azure')
150 150 have['requests'] = test_for('requests')
151 151 have['sphinx'] = test_for('sphinx')
152 152 have['casperjs'] = is_cmd_found('casperjs')
153 153
154 154 min_zmq = (2,1,11)
155 155
156 156 have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())
157 157
158 158 #-----------------------------------------------------------------------------
159 159 # Test suite definitions
160 160 #-----------------------------------------------------------------------------
161 161
162 162 test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
163 163 'extensions', 'lib', 'terminal', 'testing', 'utils',
164 164 'nbformat', 'qt', 'html', 'nbconvert'
165 165 ]
166 166
167 167 class TestSection(object):
168 168 def __init__(self, name, includes):
169 169 self.name = name
170 170 self.includes = includes
171 171 self.excludes = []
172 172 self.dependencies = []
173 173 self.enabled = True
174 174
175 175 def exclude(self, module):
176 176 if not module.startswith('IPython'):
177 177 module = self.includes[0] + "." + module
178 178 self.excludes.append(module.replace('.', os.sep))
179 179
180 180 def requires(self, *packages):
181 181 self.dependencies.extend(packages)
182 182
183 183 @property
184 184 def will_run(self):
185 185 return self.enabled and all(have[p] for p in self.dependencies)
186 186
187 187 # Name -> (include, exclude, dependencies_met)
188 188 test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
189 189
190 190 # Exclusions and dependencies
191 191 # ---------------------------
192 192
193 193 # core:
194 194 sec = test_sections['core']
195 195 if not have['sqlite3']:
196 196 sec.exclude('tests.test_history')
197 197 sec.exclude('history')
198 198 if not have['matplotlib']:
199 199 sec.exclude('pylabtools'),
200 200 sec.exclude('tests.test_pylabtools')
201 201
202 202 # lib:
203 203 sec = test_sections['lib']
204 204 if not have['pexpect']:
205 205 sec.exclude('irunner')
206 206 sec.exclude('tests.test_irunner')
207 207 if not have['zmq']:
208 208 sec.exclude('kernel')
209 209 # We do this unconditionally, so that the test suite doesn't import
210 210 # gtk, changing the default encoding and masking some unicode bugs.
211 211 sec.exclude('inputhookgtk')
212 212 # We also do this unconditionally, because wx can interfere with Unix signals.
213 213 # There are currently no tests for it anyway.
214 214 sec.exclude('inputhookwx')
215 215 # Testing inputhook will need a lot of thought, to figure out
216 216 # how to have tests that don't lock up with the gui event
217 217 # loops in the picture
218 218 sec.exclude('inputhook')
219 219
220 220 # testing:
221 221 sec = test_sections['testing']
222 222 # This guy is probably attic material
223 223 sec.exclude('mkdoctests')
224 224 # These have to be skipped on win32 because they use echo, rm, cd, etc.
225 225 # See ticket https://github.com/ipython/ipython/issues/87
226 226 if sys.platform == 'win32':
227 227 sec.exclude('plugin.test_exampleip')
228 228 sec.exclude('plugin.dtexample')
229 229
230 230 # terminal:
231 231 if (not have['pexpect']) or (not have['zmq']):
232 232 test_sections['terminal'].exclude('console')
233 233
234 234 # parallel
235 235 sec = test_sections['parallel']
236 236 sec.requires('zmq')
237 237 if not have['pymongo']:
238 238 sec.exclude('controller.mongodb')
239 239 sec.exclude('tests.test_mongodb')
240 240
241 241 # kernel:
242 242 sec = test_sections['kernel']
243 243 sec.requires('zmq')
244 244 # The in-process kernel tests are done in a separate section
245 245 sec.exclude('inprocess')
246 246 # importing gtk sets the default encoding, which we want to avoid
247 247 sec.exclude('zmq.gui.gtkembed')
248 248 if not have['matplotlib']:
249 249 sec.exclude('zmq.pylab')
250 250
251 251 # kernel.inprocess:
252 252 test_sections['kernel.inprocess'].requires('zmq')
253 253
254 254 # extensions:
255 255 sec = test_sections['extensions']
256 256 if not have['cython']:
257 257 sec.exclude('cythonmagic')
258 258 sec.exclude('tests.test_cythonmagic')
259 259 if not have['oct2py']:
260 260 sec.exclude('octavemagic')
261 261 sec.exclude('tests.test_octavemagic')
262 262 if not have['rpy2'] or not have['numpy']:
263 263 sec.exclude('rmagic')
264 264 sec.exclude('tests.test_rmagic')
265 265 # autoreload does some strange stuff, so move it to its own test section
266 266 sec.exclude('autoreload')
267 267 sec.exclude('tests.test_autoreload')
268 268 test_sections['autoreload'] = TestSection('autoreload',
269 269 ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
270 270 test_group_names.append('autoreload')
271 271
272 272 # qt:
273 273 test_sections['qt'].requires('zmq', 'qt', 'pygments')
274 274
275 275 # html:
276 276 sec = test_sections['html']
277 277 sec.requires('zmq', 'tornado', 'requests')
278 278 # The notebook 'static' directory contains JS, css and other
279 279 # files for web serving. Occasionally projects may put a .py
280 280 # file in there (MathJax ships a conf.py), so we might as
281 281 # well play it safe and skip the whole thing.
282 282 sec.exclude('static')
283 283 sec.exclude('fabfile')
284 284 if not have['jinja2']:
285 285 sec.exclude('notebookapp')
286 286 if not have['azure']:
287 287 sec.exclude('services.notebooks.azurenbmanager')
288 288
289 289 # config:
290 290 # Config files aren't really importable stand-alone
291 291 test_sections['config'].exclude('profile')
292 292
293 293 # nbconvert:
294 294 sec = test_sections['nbconvert']
295 295 sec.requires('pygments', 'jinja2', 'sphinx')
296 296 # Exclude nbconvert directories containing config files used to test.
297 297 # Executing the config files with iptest would cause an exception.
298 298 sec.exclude('tests.files')
299 299 sec.exclude('exporters.tests.files')
300 300 if not have['tornado']:
301 301 sec.exclude('nbconvert.post_processors.serve')
302 302 sec.exclude('nbconvert.post_processors.tests.test_serve')
303 303
304 304 #-----------------------------------------------------------------------------
305 305 # Functions and classes
306 306 #-----------------------------------------------------------------------------
307 307
308 308 def check_exclusions_exist():
309 309 from IPython.utils.path import get_ipython_package_dir
310 310 from IPython.utils.warn import warn
311 311 parent = os.path.dirname(get_ipython_package_dir())
312 312 for sec in test_sections:
313 313 for pattern in sec.exclusions:
314 314 fullpath = pjoin(parent, pattern)
315 315 if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
316 316 warn("Excluding nonexistent file: %r" % pattern)
317 317
318 318
319 319 class ExclusionPlugin(Plugin):
320 320 """A nose plugin to effect our exclusions of files and directories.
321 321 """
322 322 name = 'exclusions'
323 323 score = 3000 # Should come before any other plugins
324 324
325 325 def __init__(self, exclude_patterns=None):
326 326 """
327 327 Parameters
328 328 ----------
329 329
330 330 exclude_patterns : sequence of strings, optional
331 331 Filenames containing these patterns (as raw strings, not as regular
332 332 expressions) are excluded from the tests.
333 333 """
334 334 self.exclude_patterns = exclude_patterns or []
335 335 super(ExclusionPlugin, self).__init__()
336 336
337 337 def options(self, parser, env=os.environ):
338 338 Plugin.options(self, parser, env)
339 339
340 340 def configure(self, options, config):
341 341 Plugin.configure(self, options, config)
342 342 # Override nose trying to disable plugin.
343 343 self.enabled = True
344 344
345 345 def wantFile(self, filename):
346 346 """Return whether the given filename should be scanned for tests.
347 347 """
348 348 if any(pat in filename for pat in self.exclude_patterns):
349 349 return False
350 350 return None
351 351
352 352 def wantDirectory(self, directory):
353 353 """Return whether the given directory should be scanned for tests.
354 354 """
355 355 if any(pat in directory for pat in self.exclude_patterns):
356 356 return False
357 357 return None
358 358
359 359
360 360 class StreamCapturer(Thread):
361 361 daemon = True # Don't hang if main thread crashes
362 362 started = False
363 363 def __init__(self):
364 364 super(StreamCapturer, self).__init__()
365 365 self.streams = []
366 366 self.buffer = BytesIO()
367 367 self.readfd, self.writefd = os.pipe()
368 368 self.buffer_lock = Lock()
369 369 self.stop = Event()
370 370
371 371 def run(self):
372 372 self.started = True
373 373
374 374 while not self.stop.is_set():
375 375 chunk = os.read(self.readfd, 1024)
376 376
377 377 with self.buffer_lock:
378 378 self.buffer.write(chunk)
379 379
380 380 os.close(self.readfd)
381 381 os.close(self.writefd)
382 382
383 383 def reset_buffer(self):
384 384 with self.buffer_lock:
385 385 self.buffer.truncate(0)
386 386 self.buffer.seek(0)
387 387
388 388 def get_buffer(self):
389 389 with self.buffer_lock:
390 390 return self.buffer.getvalue()
391 391
392 392 def ensure_started(self):
393 393 if not self.started:
394 394 self.start()
395 395
396 396 def halt(self):
397 397 """Safely stop the thread."""
398 398 if not self.started:
399 399 return
400 400
401 401 self.stop.set()
402 402 os.write(self.writefd, b'wake up') # Ensure we're not locked in a read()
403 403 self.join()
404 404
405 405 class SubprocessStreamCapturePlugin(Plugin):
406 406 name='subprocstreams'
407 407 def __init__(self):
408 408 Plugin.__init__(self)
409 409 self.stream_capturer = StreamCapturer()
410 self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
410 411 # This is ugly, but distant parts of the test machinery need to be able
411 412 # to redirect streams, so we make the object globally accessible.
412 nose.ipy_stream_capturer = self.stream_capturer
413 nose.iptest_stdstreams_fileno = self.get_write_fileno
414
415 def get_write_fileno(self):
416 if self.destination == 'capture':
417 self.stream_capturer.ensure_started()
418 return self.stream_capturer.writefd
419 elif self.destination == 'discard':
420 return os.open(os.devnull, os.O_WRONLY)
421 else:
422 return sys.__stdout__.fileno()
413 423
414 424 def configure(self, options, config):
415 425 Plugin.configure(self, options, config)
416 426 # Override nose trying to disable plugin.
417 self.enabled = True
427 if self.destination == 'capture':
428 self.enabled = True
418 429
419 430 def startTest(self, test):
420 431 # Reset log capture
421 432 self.stream_capturer.reset_buffer()
422 433
423 434 def formatFailure(self, test, err):
424 435 # Show output
425 436 ec, ev, tb = err
426 437 captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
427 438 if captured.strip():
428 439 ev = safe_str(ev)
429 440 out = [ev, '>> begin captured subprocess output <<',
430 441 captured,
431 442 '>> end captured subprocess output <<']
432 443 return ec, '\n'.join(out), tb
433 444
434 445 return err
435 446
436 447 formatError = formatFailure
437 448
438 449 def finalize(self, result):
439 450 self.stream_capturer.halt()
440 451
441 452
442 453 def run_iptest():
443 454 """Run the IPython test suite using nose.
444 455
445 456 This function is called when this script is **not** called with the form
446 457 `iptest all`. It simply calls nose with appropriate command line flags
447 458 and accepts all of the standard nose arguments.
448 459 """
449 460 # Apply our monkeypatch to Xunit
450 461 if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
451 462 monkeypatch_xunit()
452 463
453 464 warnings.filterwarnings('ignore',
454 465 'This will be removed soon. Use IPython.testing.util instead')
455 466
456 467 arg1 = sys.argv[1]
457 468 if arg1 in test_sections:
458 469 section = test_sections[arg1]
459 470 sys.argv[1:2] = section.includes
460 471 elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
461 472 section = test_sections[arg1[8:]]
462 473 sys.argv[1:2] = section.includes
463 474 else:
464 475 section = TestSection(arg1, includes=[arg1])
465 476
466 477
467 478 argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
468 479
469 480 '--with-ipdoctest',
470 481 '--ipdoctest-tests','--ipdoctest-extension=txt',
471 482
472 483 # We add --exe because of setuptools' imbecility (it
473 484 # blindly does chmod +x on ALL files). Nose does the
474 485 # right thing and it tries to avoid executables,
475 486 # setuptools unfortunately forces our hand here. This
476 487 # has been discussed on the distutils list and the
477 488 # setuptools devs refuse to fix this problem!
478 489 '--exe',
479 490 ]
480 491 if '-a' not in argv and '-A' not in argv:
481 492 argv = argv + ['-a', '!crash']
482 493
483 494 if nose.__version__ >= '0.11':
484 495 # I don't fully understand why we need this one, but depending on what
485 496 # directory the test suite is run from, if we don't give it, 0 tests
486 497 # get run. Specifically, if the test suite is run from the source dir
487 498 # with an argument (like 'iptest.py IPython.core', 0 tests are run,
488 499 # even if the same call done in this directory works fine). It appears
489 500 # that if the requested package is in the current dir, nose bails early
490 501 # by default. Since it's otherwise harmless, leave it in by default
491 502 # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
492 503 argv.append('--traverse-namespace')
493 504
494 505 # use our plugin for doctesting. It will remove the standard doctest plugin
495 506 # if it finds it enabled
496 507 plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(),
497 508 SubprocessStreamCapturePlugin() ]
498 509
499 510 # Use working directory set by parent process (see iptestcontroller)
500 511 if 'IPTEST_WORKING_DIR' in os.environ:
501 512 os.chdir(os.environ['IPTEST_WORKING_DIR'])
502 513
503 514 # We need a global ipython running in this process, but the special
504 515 # in-process group spawns its own IPython kernels, so for *that* group we
505 516 # must avoid also opening the global one (otherwise there's a conflict of
506 517 # singletons). Ultimately the solution to this problem is to refactor our
507 518 # assumptions about what needs to be a singleton and what doesn't (app
508 519 # objects should, individual shells shouldn't). But for now, this
509 520 # workaround allows the test suite for the inprocess module to complete.
510 521 if 'kernel.inprocess' not in section.name:
511 522 from IPython.testing import globalipapp
512 523 globalipapp.start_ipython()
513 524
514 525 # Now nose can run
515 526 TestProgram(argv=argv, addplugins=plugins)
516 527
517 528 if __name__ == '__main__':
518 529 run_iptest()
519 530
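In the runner hunk above, SubprocessStreamCapturePlugin now reads IPTEST_SUBPROC_STREAMS (defaulting to 'capture') and exposes get_write_fileno as nose.iptest_stdstreams_fileno: 'capture' lazily starts the capture thread and returns its pipe, 'discard' returns a descriptor opened on os.devnull, and any other value (such as 'show') returns the real stdout. The StreamCapturer itself can also be exercised on its own; a rough sketch, assuming an installed IPython checkout with nose available (echo is just a convenient stand-in for a test subprocess):

    import time
    from subprocess import Popen, STDOUT

    from IPython.testing.iptest import StreamCapturer

    capturer = StreamCapturer()
    capturer.ensure_started()
    p = Popen(['echo', 'hello from a subprocess'],
              stdout=capturer.writefd, stderr=STDOUT)
    p.wait()
    time.sleep(0.2)  # give the reader thread a moment to drain the pipe
    print(capturer.get_buffer().decode('utf-8', 'replace'))
    capturer.halt()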
@@ -1,501 +1,507 b''
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Test Process Controller
3 3
4 4 This module runs one or more subprocesses which will actually run the IPython
5 5 test suite.
6 6
7 7 """
8 8
9 9 #-----------------------------------------------------------------------------
10 10 # Copyright (C) 2009-2011 The IPython Development Team
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #-----------------------------------------------------------------------------
15 15
16 16 #-----------------------------------------------------------------------------
17 17 # Imports
18 18 #-----------------------------------------------------------------------------
19 19 from __future__ import print_function
20 20
21 21 import argparse
22 22 import multiprocessing.pool
23 23 from multiprocessing import Process, Queue
24 24 import os
25 25 import shutil
26 26 import signal
27 27 import sys
28 28 import subprocess
29 29 import time
30 30
31 31 from .iptest import have, test_group_names as py_test_group_names, test_sections
32 32 from IPython.utils.path import compress_user
33 33 from IPython.utils.py3compat import bytes_to_str
34 34 from IPython.utils.sysinfo import get_sys_info
35 35 from IPython.utils.tempdir import TemporaryDirectory
36 36
37 37
38 38 class TestController(object):
39 39 """Run tests in a subprocess
40 40 """
41 41 #: str, IPython test suite to be executed.
42 42 section = None
43 43 #: list, command line arguments to be executed
44 44 cmd = None
45 45 #: dict, extra environment variables to set for the subprocess
46 46 env = None
47 47 #: list, TemporaryDirectory instances to clear up when the process finishes
48 48 dirs = None
49 49 #: subprocess.Popen instance
50 50 process = None
51 51 #: str, process stdout+stderr
52 52 stdout = None
53 53 #: bool, whether to capture process stdout & stderr
54 54 buffer_output = False
55 55
56 56 def __init__(self):
57 57 self.cmd = []
58 58 self.env = {}
59 59 self.dirs = []
60 60
61 61 def launch(self):
62 62 # print('*** ENV:', self.env) # dbg
63 63 # print('*** CMD:', self.cmd) # dbg
64 64 env = os.environ.copy()
65 65 env.update(self.env)
66 66 output = subprocess.PIPE if self.buffer_output else None
67 67 stdout = subprocess.STDOUT if self.buffer_output else None
68 68 self.process = subprocess.Popen(self.cmd, stdout=output,
69 69 stderr=stdout, env=env)
70 70
71 71 def wait(self):
72 72 self.stdout, _ = self.process.communicate()
73 73 return self.process.returncode
74 74
75 75 def cleanup_process(self):
76 76 """Cleanup on exit by killing any leftover processes."""
77 77 subp = self.process
78 78 if subp is None or (subp.poll() is not None):
79 79 return # Process doesn't exist, or is already dead.
80 80
81 81 try:
82 82 print('Cleaning up stale PID: %d' % subp.pid)
83 83 subp.kill()
84 84 except: # (OSError, WindowsError) ?
85 85 # This is just a best effort, if we fail or the process was
86 86 # really gone, ignore it.
87 87 pass
88 88 else:
89 89 for i in range(10):
90 90 if subp.poll() is None:
91 91 time.sleep(0.1)
92 92 else:
93 93 break
94 94
95 95 if subp.poll() is None:
96 96 # The process did not die...
97 97 print('... failed. Manual cleanup may be required.')
98 98
99 99 def cleanup(self):
100 100 "Kill process if it's still alive, and clean up temporary directories"
101 101 self.cleanup_process()
102 102 for td in self.dirs:
103 103 td.cleanup()
104 104
105 105 __del__ = cleanup
106 106
107 107 class PyTestController(TestController):
108 108 """Run Python tests using IPython.testing.iptest"""
109 109 #: str, Python command to execute in subprocess
110 110 pycmd = None
111 111
112 112 def __init__(self, section):
113 113 """Create new test runner."""
114 114 TestController.__init__(self)
115 115 self.section = section
116 116 # pycmd is put into cmd[2] in PyTestController.launch()
117 117 self.cmd = [sys.executable, '-c', None, section]
118 118 self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
119 119 ipydir = TemporaryDirectory()
120 120 self.dirs.append(ipydir)
121 121 self.env['IPYTHONDIR'] = ipydir.name
122 122 self.workingdir = workingdir = TemporaryDirectory()
123 123 self.dirs.append(workingdir)
124 124 self.env['IPTEST_WORKING_DIR'] = workingdir.name
125 125 # This means we won't get odd effects from our own matplotlib config
126 126 self.env['MPLCONFIGDIR'] = workingdir.name
127 127
128 128 @property
129 129 def will_run(self):
130 130 try:
131 131 return test_sections[self.section].will_run
132 132 except KeyError:
133 133 return True
134 134
135 135 def add_xunit(self):
136 136 xunit_file = os.path.abspath(self.section + '.xunit.xml')
137 137 self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
138 138
139 139 def add_coverage(self):
140 140 try:
141 141 sources = test_sections[self.section].includes
142 142 except KeyError:
143 143 sources = ['IPython']
144 144
145 145 coverage_rc = ("[run]\n"
146 146 "data_file = {data_file}\n"
147 147 "source =\n"
148 148 " {source}\n"
149 149 ).format(data_file=os.path.abspath('.coverage.'+self.section),
150 150 source="\n ".join(sources))
151 151 config_file = os.path.join(self.workingdir.name, '.coveragerc')
152 152 with open(config_file, 'w') as f:
153 153 f.write(coverage_rc)
154 154
155 155 self.env['COVERAGE_PROCESS_START'] = config_file
156 156 self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
157 157
158 158 def launch(self):
159 159 self.cmd[2] = self.pycmd
160 160 super(PyTestController, self).launch()
161 161
162 162 class JSController(TestController):
163 163 """Run CasperJS tests """
164 164 def __init__(self, section):
165 165 """Create new test runner."""
166 166 TestController.__init__(self)
167 167 self.section = section
168 168
169 169 self.ipydir = TemporaryDirectory()
170 170 self.dirs.append(self.ipydir)
171 171 self.env['IPYTHONDIR'] = self.ipydir.name
172 172
173 173 def launch(self):
174 174 # start the ipython notebook, so we get the port number
175 175 self._init_server()
176 176
177 177 import IPython.html.tests as t
178 178 test_dir = os.path.join(os.path.dirname(t.__file__), 'casperjs')
179 179 includes = '--includes=' + os.path.join(test_dir,'util.js')
180 180 test_cases = os.path.join(test_dir, 'test_cases')
181 181 port = '--port=' + str(self.server_port)
182 182 self.cmd = ['casperjs', 'test', port, includes, test_cases]
183 183
184 184 super(JSController, self).launch()
185 185
186 186 @property
187 187 def will_run(self):
188 188 return all(have[a] for a in ['zmq', 'tornado', 'jinja2', 'casperjs'])
189 189
190 190 def _init_server(self):
191 191 "Start the notebook server in a separate process"
192 192 self.queue = q = Queue()
193 193 self.server = Process(target=run_webapp, args=(q, self.ipydir.name))
194 194 self.server.start()
195 195 self.server_port = q.get()
196 196
197 197 def cleanup(self):
198 198 self.server.terminate()
199 199 self.server.join()
200 200 TestController.cleanup(self)
201 201
202 202 js_test_group_names = {'js'}
203 203
204 204 def run_webapp(q, nbdir, loglevel=0):
205 205 """start the IPython Notebook, and pass port back to the queue"""
206 206 import os
207 207 import IPython.html.notebookapp as nbapp
208 208 import sys
209 209 sys.stderr = open(os.devnull, 'w')
210 210 os.environ["IPYTHONDIR"] = nbdir
211 211 server = nbapp.NotebookApp()
212 212 args = ['--no-browser']
213 213 args.append('--notebook-dir='+nbdir)
214 214 args.append('--profile-dir='+nbdir)
215 215 args.append('--log-level='+str(loglevel))
216 216 server.initialize(args)
217 217 # communicate the port number to the parent process
218 218 q.put(server.port)
219 219 server.start()
220 220
221 221 def prepare_controllers(options):
222 222 """Returns two lists of TestController instances, those to run, and those
223 223 not to run."""
224 224 testgroups = options.testgroups
225 225
226 226 if testgroups:
227 227 py_testgroups = [g for g in testgroups if (g in py_test_group_names) \
228 228 or g.startswith('IPython')]
229 229 js_testgroups = [g for g in testgroups if g in js_test_group_names]
230 230 else:
231 231 py_testgroups = py_test_group_names
232 232 js_testgroups = js_test_group_names
233 233 if not options.all:
234 234 test_sections['parallel'].enabled = False
235 235
236 236 c_js = [JSController(name) for name in js_testgroups]
237 237 c_py = [PyTestController(name) for name in py_testgroups]
238 238
239 239 configure_py_controllers(c_py, xunit=options.xunit,
240 coverage=options.coverage, extra_args=options.extra_args)
240 coverage=options.coverage, subproc_streams=options.subproc_streams,
241 extra_args=options.extra_args)
241 242
242 243 controllers = c_py + c_js
243 244 to_run = [c for c in controllers if c.will_run]
244 245 not_run = [c for c in controllers if not c.will_run]
245 246 return to_run, not_run
246 247
247 def configure_py_controllers(controllers, xunit=False, coverage=False, extra_args=()):
248 def configure_py_controllers(controllers, xunit=False, coverage=False,
249 subproc_streams='capture', extra_args=()):
248 250 """Apply options for a collection of TestController objects."""
249 251 for controller in controllers:
250 252 if xunit:
251 253 controller.add_xunit()
252 254 if coverage:
253 255 controller.add_coverage()
256 controller.env['IPTEST_SUBPROC_STREAMS'] = subproc_streams
254 257 controller.cmd.extend(extra_args)
255 258
256 259 def do_run(controller):
257 260 try:
258 261 try:
259 262 controller.launch()
260 263 except Exception:
261 264 import traceback
262 265 traceback.print_exc()
263 266 return controller, 1 # signal failure
264 267
265 268 exitcode = controller.wait()
266 269 return controller, exitcode
267 270
268 271 except KeyboardInterrupt:
269 272 return controller, -signal.SIGINT
270 273 finally:
271 274 controller.cleanup()
272 275
273 276 def report():
274 277 """Return a string with a summary report of test-related variables."""
275 278 inf = get_sys_info()
276 279 out = []
277 280 def _add(name, value):
278 281 out.append((name, value))
279 282
280 283 _add('IPython version', inf['ipython_version'])
281 284 _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
282 285 _add('IPython package', compress_user(inf['ipython_path']))
283 286 _add('Python version', inf['sys_version'].replace('\n',''))
284 287 _add('sys.executable', compress_user(inf['sys_executable']))
285 288 _add('Platform', inf['platform'])
286 289
287 290 width = max(len(n) for (n,v) in out)
288 291 out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
289 292
290 293 avail = []
291 294 not_avail = []
292 295
293 296 for k, is_avail in have.items():
294 297 if is_avail:
295 298 avail.append(k)
296 299 else:
297 300 not_avail.append(k)
298 301
299 302 if avail:
300 303 out.append('\nTools and libraries available at test time:\n')
301 304 avail.sort()
302 305 out.append(' ' + ' '.join(avail)+'\n')
303 306
304 307 if not_avail:
305 308 out.append('\nTools and libraries NOT available at test time:\n')
306 309 not_avail.sort()
307 310 out.append(' ' + ' '.join(not_avail)+'\n')
308 311
309 312 return ''.join(out)
310 313
311 314 def run_iptestall(options):
312 315 """Run the entire IPython test suite by calling nose and trial.
313 316
314 317 This function constructs :class:`IPTester` instances for all IPython
315 318 modules and package and then runs each of them. This causes the modules
316 319 and packages of IPython to be tested each in their own subprocess using
317 320 nose.
318 321
319 322 Parameters
320 323 ----------
321 324
322 325 All parameters are passed as attributes of the options object.
323 326
324 327 testgroups : list of str
325 328 Run only these sections of the test suite. If empty, run all the available
326 329 sections.
327 330
328 331 fast : int or None
329 332 Run the test suite in parallel, using n simultaneous processes. If None
330 333 is passed, one process is used per CPU core. Default 1 (i.e. sequential)
331 334
332 335 inc_slow : bool
333 336 Include slow tests, like IPython.parallel. By default, these tests aren't
334 337 run.
335 338
336 339 xunit : bool
337 340 Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
338 341
339 342 coverage : bool or str
340 343 Measure code coverage from tests. True will store the raw coverage data,
341 344 or pass 'html' or 'xml' to get reports.
342 345
343 346 extra_args : list
344 347 Extra arguments to pass to the test subprocesses, e.g. '-v'
345 348 """
346 349 if options.fast != 1:
347 350 # If running in parallel, capture output so it doesn't get interleaved
348 351 TestController.buffer_output = True
349 352
350 353 to_run, not_run = prepare_controllers(options)
351 354
352 355 def justify(ltext, rtext, width=70, fill='-'):
353 356 ltext += ' '
354 357 rtext = (' ' + rtext).rjust(width - len(ltext), fill)
355 358 return ltext + rtext
356 359
357 360 # Run all test runners, tracking execution time
358 361 failed = []
359 362 t_start = time.time()
360 363
361 364 print()
362 365 if options.fast == 1:
363 366 # This actually means sequential, i.e. with 1 job
364 367 for controller in to_run:
365 368 print('IPython test group:', controller.section)
366 369 sys.stdout.flush() # Show in correct order when output is piped
367 370 controller, res = do_run(controller)
368 371 if res:
369 372 failed.append(controller)
370 373 if res == -signal.SIGINT:
371 374 print("Interrupted")
372 375 break
373 376 print()
374 377
375 378 else:
376 379 # Run tests concurrently
377 380 try:
378 381 pool = multiprocessing.pool.ThreadPool(options.fast)
379 382 for (controller, res) in pool.imap_unordered(do_run, to_run):
380 383 res_string = 'OK' if res == 0 else 'FAILED'
381 384 print(justify('IPython test group: ' + controller.section, res_string))
382 385 if res:
383 386 print(bytes_to_str(controller.stdout))
384 387 failed.append(controller)
385 388 if res == -signal.SIGINT:
386 389 print("Interrupted")
387 390 break
388 391 except KeyboardInterrupt:
389 392 return
390 393
391 394 for controller in not_run:
392 395 print(justify('IPython test group: ' + controller.section, 'NOT RUN'))
393 396
394 397 t_end = time.time()
395 398 t_tests = t_end - t_start
396 399 nrunners = len(to_run)
397 400 nfail = len(failed)
398 401 # summarize results
399 402 print('_'*70)
400 403 print('Test suite completed for system with the following information:')
401 404 print(report())
402 405 took = "Took %.3fs." % t_tests
403 406 print('Status: ', end='')
404 407 if not failed:
405 408 print('OK (%d test groups).' % nrunners, took)
406 409 else:
407 410 # If anything went wrong, point out what command to rerun manually to
408 411 # see the actual errors and individual summary
409 412 failed_sections = [c.section for c in failed]
410 413 print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
411 414 nrunners, ', '.join(failed_sections)), took)
412 415 print()
413 416 print('You may wish to rerun these, with:')
414 417 print(' iptest', *failed_sections)
415 418 print()
416 419
417 420 if options.coverage:
418 421 from coverage import coverage
419 422 cov = coverage(data_file='.coverage')
420 423 cov.combine()
421 424 cov.save()
422 425
423 426 # Coverage HTML report
424 427 if options.coverage == 'html':
425 428 html_dir = 'ipy_htmlcov'
426 429 shutil.rmtree(html_dir, ignore_errors=True)
427 430 print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
428 431 sys.stdout.flush()
429 432
430 433 # Custom HTML reporter to clean up module names.
431 434 from coverage.html import HtmlReporter
432 435 class CustomHtmlReporter(HtmlReporter):
433 436 def find_code_units(self, morfs):
434 437 super(CustomHtmlReporter, self).find_code_units(morfs)
435 438 for cu in self.code_units:
436 439 nameparts = cu.name.split(os.sep)
437 440 if 'IPython' not in nameparts:
438 441 continue
439 442 ix = nameparts.index('IPython')
440 443 cu.name = '.'.join(nameparts[ix:])
441 444
442 445 # Reimplement the html_report method with our custom reporter
443 446 cov._harvest_data()
444 447 cov.config.from_args(omit='*%stests' % os.sep, html_dir=html_dir,
445 448 html_title='IPython test coverage',
446 449 )
447 450 reporter = CustomHtmlReporter(cov, cov.config)
448 451 reporter.report(None)
449 452 print('done.')
450 453
451 454 # Coverage XML report
452 455 elif options.coverage == 'xml':
453 456 cov.xml_report(outfile='ipy_coverage.xml')
454 457
455 458 if failed:
456 459 # Ensure that our exit code indicates failure
457 460 sys.exit(1)
458 461
459 462 argparser = argparse.ArgumentParser(description='Run IPython test suite')
460 463 argparser.add_argument('testgroups', nargs='*',
461 464 help='Run specified groups of tests. If omitted, run '
462 465 'all tests.')
463 466 argparser.add_argument('--all', action='store_true',
464 467 help='Include slow tests not run by default.')
465 468 argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
466 469 help='Run test sections in parallel.')
467 470 argparser.add_argument('--xunit', action='store_true',
468 471 help='Produce Xunit XML results')
469 472 argparser.add_argument('--coverage', nargs='?', const=True, default=False,
470 473 help="Measure test coverage. Specify 'html' or "
471 474 "'xml' to get reports.")
475 argparser.add_argument('--subproc-streams', default='capture',
476 help="What to do with stdout/stderr from subprocesses. "
477 "'capture' (default), 'show' and 'discard' are the options.")
472 478
473 479 def default_options():
474 480 """Get an argparse Namespace object with the default arguments, to pass to
475 481 :func:`run_iptestall`.
476 482 """
477 483 options = argparser.parse_args([])
478 484 options.extra_args = []
479 485 return options
480 486
481 487 def main():
482 488 # Arguments after -- should be passed through to nose. Argparse treats
483 489 # everything after -- as regular positional arguments, so we separate them
484 490 # first.
485 491 try:
486 492 ix = sys.argv.index('--')
487 493 except ValueError:
488 494 to_parse = sys.argv[1:]
489 495 extra_args = []
490 496 else:
491 497 to_parse = sys.argv[1:ix]
492 498 extra_args = sys.argv[ix+1:]
493 499
494 500 options = argparser.parse_args(to_parse)
495 501 options.extra_args = extra_args
496 502
497 503 run_iptestall(options)
498 504
499 505
500 506 if __name__ == '__main__':
501 507 main()
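The controller changes above thread the new setting through to each test group: --subproc-streams (default 'capture') is parsed by argparse, passed to configure_py_controllers, and exported to the subprocess as IPTEST_SUBPROC_STREAMS, so something like `iptest --subproc-streams=show IPython.kernel` should show kernel output live instead of capturing it. The same thing can be driven programmatically; a sketch, assuming the options added in this changeset and an installed set of test dependencies:

    # Run one test group with subprocess output shown rather than captured.
    # Illustrative only, not part of the commit.
    from IPython.testing.iptestcontroller import default_options, run_iptestall

    options = default_options()
    options.testgroups = ['kernel']     # just the kernel test group
    options.subproc_streams = 'show'    # or 'capture' (default) / 'discard'
    run_iptestall(options)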