##// END OF EJS Templates
Merge pull request #4393 from takluyver/tests-subproc-stream-capture...
Thomas Kluyver -
r13241:1a4b088e merge
parent child Browse files
Show More
@@ -1,170 +1,173 b''
1 1 """utilities for testing IPython kernels"""
2 2
3 3 #-------------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-------------------------------------------------------------------------------
9 9
10 10 #-------------------------------------------------------------------------------
11 11 # Imports
12 12 #-------------------------------------------------------------------------------
13 13
14 14 import atexit
15 15
16 16 from contextlib import contextmanager
17 from subprocess import PIPE
17 from subprocess import PIPE, STDOUT
18 18 from Queue import Empty
19 19
20 import nose
20 21 import nose.tools as nt
21 22
22 23 from IPython.kernel import KernelManager
23 24
24 25 #-------------------------------------------------------------------------------
25 26 # Globals
26 27 #-------------------------------------------------------------------------------
27 28
# Seconds to wait for a freshly launched kernel to answer kernel_info.
STARTUP_TIMEOUT = 60
# Seconds to wait for individual messages from an already-running kernel.
TIMEOUT = 15

# Shared global KernelManager / KernelClient.  Created lazily by
# start_global_kernel() and torn down by stop_global_kernel() (via atexit).
KM = None
KC = None
33 34
34 35 #-------------------------------------------------------------------------------
35 36 # code
36 37 #-------------------------------------------------------------------------------
37 38
38 39
def start_new_kernel(argv=None):
    """Launch a fresh kernel subprocess and return (KernelManager, KernelClient).

    The kernel's stderr is merged into stdout, and that stream is registered
    with the global stream capturer so subprocess output can be shown with
    failing tests.
    """
    manager = KernelManager()
    launch_kwargs = {'stdout': PIPE, 'stderr': STDOUT}
    if argv:
        launch_kwargs['extra_arguments'] = argv
    manager.start_kernel(**launch_kwargs)
    # ipy_stream_capturer is attached to the nose module by
    # IPython.testing.iptest's SubprocessStreamCapturePlugin.
    nose.ipy_stream_capturer.add_stream(manager.kernel.stdout.fileno())
    nose.ipy_stream_capturer.ensure_started()
    client = manager.client()
    client.start_channels()

    # Block until the kernel answers a kernel_info request, then drain any
    # startup messages so tests begin with clean channels.
    client.kernel_info()
    client.get_shell_msg(block=True, timeout=STARTUP_TIMEOUT)
    flush_channels(client)
    return manager, client
53 56
def flush_channels(kc=None):
    """Drain all pending messages from the shell and iopub channels.

    Every drained message is run through the message-spec validator.
    Defaults to the global client when `kc` is not given.
    """
    from .test_message_spec import validate_message

    client = KC if kc is None else kc
    for channel in (client.shell_channel, client.iopub_channel):
        while True:
            try:
                msg = channel.get_msg(block=True, timeout=0.1)
            except Empty:
                break
            validate_message(msg)
68 71
69 72
def execute(code='', kc=None, **kwargs):
    """Execute `code` and validate the standard reply/status messages.

    Returns
    -------
    (msg_id, content) : the execution's message id and the execute_reply
        message content.
    """
    from .test_message_spec import validate_message
    client = KC if kc is None else kc
    msg_id = client.execute(code=code, **kwargs)
    reply = client.get_shell_msg(timeout=TIMEOUT)
    validate_message(reply, 'execute_reply', msg_id)
    busy = client.get_iopub_msg(timeout=TIMEOUT)
    validate_message(busy, 'status', msg_id)
    nt.assert_equal(busy['content']['execution_state'], 'busy')

    if not kwargs.get('silent'):
        # Non-silent executions also broadcast the input back on iopub.
        pyin = client.get_iopub_msg(timeout=TIMEOUT)
        validate_message(pyin, 'pyin', msg_id)
        nt.assert_equal(pyin['content']['code'], code)

    return msg_id, reply['content']
88 91
def start_global_kernel():
    """Return the shared kernel's client, starting the kernel on first use."""
    global KM, KC
    if KM is not None:
        return KC
    KM, KC = start_new_kernel()
    # Ensure the shared kernel is torn down when the test process exits.
    atexit.register(stop_global_kernel)
    return KC
96 99
@contextmanager
def kernel():
    """Context manager for the global kernel instance

    Should be used for most kernel tests

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    kc = start_global_kernel()
    yield kc
108 111
def uses_kernel(test_f):
    """Decorator for tests that use the global kernel.

    The wrapped test is invoked with the shared kernel client as its only
    argument.  Uses functools.wraps instead of manually copying __doc__ and
    __name__, so __module__, __qualname__ and __dict__ are preserved too
    (nose relies on test metadata for reporting/selection).
    """
    import functools

    @functools.wraps(test_f)
    def wrapped_test():
        with kernel() as kc:
            test_f(kc)
    return wrapped_test
117 120
def stop_global_kernel():
    """Stop the global shared kernel instance, if it exists.

    Safe to call more than once: the original version called
    KC.stop_channels() unconditionally, so a second call (e.g. the atexit
    hook firing after an explicit stop) raised AttributeError on None.
    """
    global KM, KC
    if KC is not None:
        KC.stop_channels()
        KC = None
    if KM is None:
        return
    KM.shutdown_kernel(now=True)
    KM = None
127 130
@contextmanager
def new_kernel(argv=None):
    """Context manager for a new kernel in a subprocess

    Should only be used for tests where the kernel must not be re-used.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    manager, client = start_new_kernel(argv)
    try:
        yield client
    finally:
        # Always tear the private kernel down, even if the test body raised.
        client.stop_channels()
        manager.shutdown_kernel(now=True)
144 147
145 148
def assemble_output(iopub):
    """Assemble stdout/stderr from an execution.

    Reads iopub messages until the 'idle' status message arrives,
    concatenating 'stream' message data into two strings.

    Returns
    -------
    (stdout, stderr) : accumulated stream text.
    """
    stdout_parts = []
    stderr_parts = []
    while True:
        msg = iopub.get_msg(block=True, timeout=1)
        content = msg['content']
        if msg['msg_type'] == 'status' and content['execution_state'] == 'idle':
            # The idle status message signals the end of the output.
            break
        if msg['msg_type'] != 'stream':
            # Other message types (pyin, pyout, ...) are ignored here.
            continue
        name = content['name']
        if name == 'stdout':
            stdout_parts.append(content['data'])
        elif name == 'stderr':
            stderr_parts.append(content['data'])
        else:
            raise KeyError("bad stream: %r" % name)
    return ''.join(stdout_parts), ''.join(stderr_parts)
168 171
169 172
170 173
@@ -1,125 +1,132 b''
1 1 """toplevel setup/teardown for parallel tests."""
2 2
3 3 #-------------------------------------------------------------------------------
4 4 # Copyright (C) 2011 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-------------------------------------------------------------------------------
9 9
10 10 #-------------------------------------------------------------------------------
11 11 # Imports
12 12 #-------------------------------------------------------------------------------
13 13
14 14 import os
15 15 import tempfile
16 16 import time
17 from subprocess import Popen
17 from subprocess import Popen, PIPE, STDOUT
18
19 import nose
18 20
19 21 from IPython.utils.path import get_ipython_dir
20 22 from IPython.parallel import Client
21 23 from IPython.parallel.apps.launcher import (LocalProcessLauncher,
22 24 ipengine_cmd_argv,
23 25 ipcontroller_cmd_argv,
24 26 SIGKILL,
25 27 ProcessStateError,
26 28 )
27 29
# globals
# All controller/engine launchers started by setup()/add_engines(),
# torn down (in reverse order) by teardown().
launchers = []
# Sink for output we deliberately discard; closed in teardown().
blackhole = open(os.devnull, 'w')
31 33
32 34 # Launcher class
class TestProcessLauncher(LocalProcessLauncher):
    """subclass LocalProcessLauncher, to prevent extra sockets and threads being created on Windows"""
    def start(self):
        if self.state != 'before':
            raise ProcessStateError(
                'The process was already started and has state: %r' % self.state)
        self.process = Popen(
            self.args,
            stdout=PIPE, stderr=STDOUT,
            env=os.environ,
            cwd=self.work_dir,
        )
        self.notify_start(self.process.pid)
        self.poll = self.process.poll
        # Store stdout & stderr to show with failing tests.
        # This is defined in IPython.testing.iptest
        nose.ipy_stream_capturer.add_stream(self.process.stdout.fileno())
        nose.ipy_stream_capturer.ensure_started()
47 53
48 54 # nose setup/teardown
49 55
def setup():
    """Start a test ipcontroller under profile 'iptest', wait for its
    connection files to appear, then start one engine."""
    cluster_dir = os.path.join(get_ipython_dir(), 'profile_iptest')
    security_dir = os.path.join(cluster_dir, 'security')
    engine_json = os.path.join(security_dir, 'ipcontroller-engine.json')
    client_json = os.path.join(security_dir, 'ipcontroller-client.json')
    # Remove stale connection files so we wait on the *new* controller's files.
    for json in (engine_json, client_json):
        if os.path.exists(json):
            os.remove(json)

    cp = TestProcessLauncher()
    cp.cmd_and_args = ipcontroller_cmd_argv + \
        ['--profile=iptest', '--log-level=20', '--ping=250', '--dictdb']
    cp.start()
    launchers.append(cp)
    tic = time.time()
    # Poll until both connection files exist, bailing out on controller
    # death or a 15 second timeout.
    while not (os.path.exists(engine_json) and os.path.exists(client_json)):
        if cp.poll() is not None:
            raise RuntimeError("The test controller exited with status %s" % cp.poll())
        elif time.time()-tic > 15:
            raise RuntimeError("Timeout waiting for the test controller to start.")
        time.sleep(0.1)
    add_engines(1)
71 77
def add_engines(n=1, profile='iptest', total=False):
    """add a number of engines to a given profile.

    If total is True, then already running engines are counted, and only
    the additional engines necessary (if any) are started.
    """
    rc = Client(profile=profile)
    base = len(rc)

    if total:
        n = max(n - base, 0)

    eps = []
    for _ in range(n):
        ep = TestProcessLauncher()
        ep.cmd_and_args = ipengine_cmd_argv + [
            '--profile=%s' % profile,
            '--log-level=50',
            '--InteractiveShell.colors=nocolor'
        ]
        ep.start()
        launchers.append(ep)
        eps.append(ep)
    tic = time.time()
    # Block until every requested engine has registered with the controller.
    while len(rc) < base+n:
        if any(ep.poll() is not None for ep in eps):
            raise RuntimeError("A test engine failed to start.")
        elif time.time()-tic > 15:
            raise RuntimeError("Timeout waiting for engines to connect.")
        time.sleep(.1)
        rc.spin()
    rc.close()
    return eps
105 111
def teardown():
    """Stop every launcher started by setup()/add_engines(), escalating to
    SIGKILL for processes that refuse to exit."""
    # Give engines a moment to finish any in-flight work before shutdown.
    time.sleep(1)
    while launchers:
        p = launchers.pop()
        # Unregister the process's captured output stream before stopping it.
        nose.ipy_stream_capturer.remove_stream(p.process.stdout.fileno())
        if p.poll() is None:
            try:
                p.stop()
            except Exception as e:
                # Best-effort shutdown: report and continue with the others.
                print e
                pass
        if p.poll() is None:
            # Give the process a moment to exit before escalating.
            time.sleep(.25)
        if p.poll() is None:
            try:
                print 'cleaning up test process...'
                p.signal(SIGKILL)
            except:
                print "couldn't shutdown process: ", p
    blackhole.close()
125 132
@@ -1,433 +1,525 b''
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Test Suite Runner.
3 3
4 4 This module provides a main entry point to a user script to test IPython
5 5 itself from the command line. There are two ways of running this script:
6 6
7 7 1. With the syntax `iptest all`. This runs our entire test suite by
8 8 calling this script (with different arguments) recursively. This
9 9 causes modules and package to be tested in different processes, using nose
10 10 or trial where appropriate.
11 11 2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
12 12 the script simply calls nose, but with special command line flags and
13 13 plugins loaded.
14 14
15 15 """
16 16
17 17 #-----------------------------------------------------------------------------
18 18 # Copyright (C) 2009-2011 The IPython Development Team
19 19 #
20 20 # Distributed under the terms of the BSD License. The full license is in
21 21 # the file COPYING, distributed as part of this software.
22 22 #-----------------------------------------------------------------------------
23 23
24 24 #-----------------------------------------------------------------------------
25 25 # Imports
26 26 #-----------------------------------------------------------------------------
27 27 from __future__ import print_function
28 28
29 29 # Stdlib
30 30 import glob
31 from io import BytesIO
31 32 import os
32 33 import os.path as path
33 import re
34 from select import select
34 35 import sys
36 from threading import Thread, Lock, Event
35 37 import warnings
36 38
37 39 # Now, proceed to import nose itself
38 40 import nose.plugins.builtin
39 41 from nose.plugins.xunit import Xunit
40 42 from nose import SkipTest
41 43 from nose.core import TestProgram
42 44 from nose.plugins import Plugin
45 from nose.util import safe_str
43 46
44 47 # Our own imports
45 48 from IPython.utils.importstring import import_item
46 49 from IPython.testing.plugin.ipdoctest import IPythonDoctest
47 50 from IPython.external.decorators import KnownFailure, knownfailureif
48 51
49 52 pjoin = path.join
50 53
51 54
52 55 #-----------------------------------------------------------------------------
53 56 # Globals
54 57 #-----------------------------------------------------------------------------
55 58
56 59
57 60 #-----------------------------------------------------------------------------
58 61 # Warnings control
59 62 #-----------------------------------------------------------------------------
60 63
# Silence known-noisy third-party deprecation warnings during test runs.
# Twisted generates annoying warnings with Python 2.6, as will do other code
# that imports 'sets' as of today
warnings.filterwarnings('ignore', 'the sets module is deprecated',
                        DeprecationWarning )

# This one also comes from Twisted
warnings.filterwarnings('ignore', 'the sha module is deprecated',
                        DeprecationWarning)

# Wx on Fedora11 spits these out
warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
                        UserWarning)
73 76
74 77 # ------------------------------------------------------------------------------
75 78 # Monkeypatch Xunit to count known failures as skipped.
76 79 # ------------------------------------------------------------------------------
def monkeypatch_xunit():
    """Patch nose's Xunit plugin so known failures are reported as skips.

    The KnownFailureTest exception class is private to the decorators
    module, so we discover it by triggering one and catching it.
    """
    try:
        knownfailureif(True)(lambda: None)()
    except Exception as e:
        KnownFailureTest = type(e)

    def addError(self, test, err, capt=None):
        # Rewrite known-failure errors as SkipTest before delegating to the
        # original (un-patched) handler.
        if issubclass(err[0], KnownFailureTest):
            err = (SkipTest,) + err[1:]
        return self.orig_addError(test, err, capt)

    # Keep the original handler so addError can delegate; run_iptest checks
    # for orig_addError to avoid double-patching.
    Xunit.orig_addError = Xunit.addError
    Xunit.addError = addError
90 93
91 94 #-----------------------------------------------------------------------------
92 95 # Check which dependencies are installed and greater than minimum version.
93 96 #-----------------------------------------------------------------------------
def extract_version(mod):
    """Return the `__version__` attribute of the given module."""
    version = mod.__version__
    return version
96 99
def test_for(item, min_version=None, callback=extract_version):
    """Test to see if item is importable, and optionally check against a minimum
    version.

    If min_version is given, the default behavior is to check against the
    `__version__` attribute of the item, but specifying `callback` allows you to
    extract the value you are interested in. e.g::

        In [1]: import sys

        In [2]: from IPython.testing.iptest import test_for

        In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
        Out[3]: True

    """
    try:
        check = import_item(item)
    except (ImportError, RuntimeError):
        # GTK reports Runtime error if it can't be initialized even if it's
        # importable.
        return False
    if not min_version:
        return True
    if callback:
        # extra processing step to get version to compare
        check = callback(check)
    return check >= min_version
128 131
# Global dict where we can store information on what we have and what we don't
# have available at test run time
have = {}

have['curses'] = test_for('_curses')
have['matplotlib'] = test_for('matplotlib')
have['numpy'] = test_for('numpy')
have['pexpect'] = test_for('IPython.external.pexpect')
have['pymongo'] = test_for('pymongo')
have['pygments'] = test_for('pygments')
have['qt'] = test_for('IPython.external.qt')
have['rpy2'] = test_for('rpy2')
have['sqlite3'] = test_for('sqlite3')
have['cython'] = test_for('Cython')
have['oct2py'] = test_for('oct2py')
# tornado exposes version_info directly, so no extract_version callback
have['tornado'] = test_for('tornado.version_info', (3,1,0), callback=None)
have['jinja2'] = test_for('jinja2')
have['wx'] = test_for('wx')
have['wx.aui'] = test_for('wx.aui')
have['azure'] = test_for('azure')
have['requests'] = test_for('requests')
have['sphinx'] = test_for('sphinx')

# Minimum pyzmq version the test suite supports.
min_zmq = (2,1,11)

# pyzmq_version_info is a function, hence the callable-invoking callback.
have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())

#-----------------------------------------------------------------------------
# Test suite definitions
#-----------------------------------------------------------------------------

# Names of the top-level test groups; extended further down
# ('autoreload' is appended in the extensions section).
test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
                    'extensions', 'lib', 'terminal', 'testing', 'utils',
                    'nbformat', 'qt', 'html', 'nbconvert'
                    ]
164 167
class TestSection(object):
    """A named group of test modules, with exclusions and dependency checks."""

    def __init__(self, name, includes):
        self.name = name
        self.includes = includes
        self.excludes = []
        self.dependencies = []
        self.enabled = True

    def exclude(self, module):
        """Exclude a module; relative names are rooted at the first include."""
        qualified = module if module.startswith('IPython') \
            else self.includes[0] + "." + module
        # Stored as a path fragment, since exclusions match against filenames.
        self.excludes.append(qualified.replace('.', os.sep))

    def requires(self, *packages):
        """Declare packages this section needs (keys into the `have` dict)."""
        for package in packages:
            self.dependencies.append(package)

    @property
    def will_run(self):
        """True when enabled and every declared dependency is available."""
        return self.enabled and all(have[p] for p in self.dependencies)
184 187
# Name -> (include, exclude, dependencies_met)
test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}

# Exclusions and dependencies
# ---------------------------

# core:
sec = test_sections['core']
if not have['sqlite3']:
    sec.exclude('tests.test_history')
    sec.exclude('history')
if not have['matplotlib']:
    # NOTE(review): stray trailing comma makes this line a one-element tuple
    # expression — harmless at runtime, but should be removed.
    sec.exclude('pylabtools'),
    sec.exclude('tests.test_pylabtools')

# lib:
sec = test_sections['lib']
if not have['wx']:
    sec.exclude('inputhookwx')
if not have['pexpect']:
    sec.exclude('irunner')
    sec.exclude('tests.test_irunner')
if not have['zmq']:
    sec.exclude('kernel')
# We do this unconditionally, so that the test suite doesn't import
# gtk, changing the default encoding and masking some unicode bugs.
sec.exclude('inputhookgtk')
# Testing inputhook will need a lot of thought, to figure out
# how to have tests that don't lock up with the gui event
# loops in the picture
sec.exclude('inputhook')

# testing:
sec = test_sections['testing']
# This guy is probably attic material
sec.exclude('mkdoctests')
# These have to be skipped on win32 because they use echo, rm, cd, etc.
# See ticket https://github.com/ipython/ipython/issues/87
if sys.platform == 'win32':
    sec.exclude('plugin.test_exampleip')
    sec.exclude('plugin.dtexample')

# terminal:
if (not have['pexpect']) or (not have['zmq']):
    test_sections['terminal'].exclude('console')

# parallel
sec = test_sections['parallel']
sec.requires('zmq')
if not have['pymongo']:
    sec.exclude('controller.mongodb')
    sec.exclude('tests.test_mongodb')

# kernel:
sec = test_sections['kernel']
sec.requires('zmq')
# The in-process kernel tests are done in a separate section
sec.exclude('inprocess')
# importing gtk sets the default encoding, which we want to avoid
sec.exclude('zmq.gui.gtkembed')
if not have['matplotlib']:
    sec.exclude('zmq.pylab')

# kernel.inprocess:
test_sections['kernel.inprocess'].requires('zmq')

# extensions:
sec = test_sections['extensions']
if not have['cython']:
    sec.exclude('cythonmagic')
    sec.exclude('tests.test_cythonmagic')
if not have['oct2py']:
    sec.exclude('octavemagic')
    sec.exclude('tests.test_octavemagic')
if not have['rpy2'] or not have['numpy']:
    sec.exclude('rmagic')
    sec.exclude('tests.test_rmagic')
# autoreload does some strange stuff, so move it to its own test section
sec.exclude('autoreload')
sec.exclude('tests.test_autoreload')
test_sections['autoreload'] = TestSection('autoreload',
    ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
test_group_names.append('autoreload')

# qt:
test_sections['qt'].requires('zmq', 'qt', 'pygments')

# html:
sec = test_sections['html']
sec.requires('zmq', 'tornado', 'requests')
# The notebook 'static' directory contains JS, css and other
# files for web serving. Occasionally projects may put a .py
# file in there (MathJax ships a conf.py), so we might as
# well play it safe and skip the whole thing.
sec.exclude('static')
sec.exclude('fabfile')
if not have['jinja2']:
    sec.exclude('notebookapp')
if not have['azure']:
    sec.exclude('services.notebooks.azurenbmanager')

# config:
# Config files aren't really importable stand-alone
test_sections['config'].exclude('profile')

# nbconvert:
sec = test_sections['nbconvert']
sec.requires('pygments', 'jinja2', 'sphinx')
# Exclude nbconvert directories containing config files used to test.
# Executing the config files with iptest would cause an exception.
sec.exclude('tests.files')
sec.exclude('exporters.tests.files')
if not have['tornado']:
    sec.exclude('nbconvert.post_processors.serve')
    sec.exclude('nbconvert.post_processors.tests.test_serve')
301 304 #-----------------------------------------------------------------------------
302 305 # Functions and classes
303 306 #-----------------------------------------------------------------------------
304 307
def check_exclusions_exist():
    """Warn about exclusion patterns that match no file on disk.

    Helps catch stale entries when test modules are moved or renamed.

    Fixes two bugs that made the original crash whenever it was called:
    iterating `test_sections` yields the dict's *string keys*, not the
    TestSection objects, and the attribute holding patterns is `excludes`
    (see TestSection), not `exclusions`.
    """
    from IPython.utils.path import get_ipython_package_dir
    from IPython.utils.warn import warn
    parent = os.path.dirname(get_ipython_package_dir())
    for sec in test_sections.values():
        for pattern in sec.excludes:
            fullpath = pjoin(parent, pattern)
            # Patterns may refer to a directory or to a module file (.py etc.)
            if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
                warn("Excluding nonexistent file: %r" % pattern)
314 317
315 318
class ExclusionPlugin(Plugin):
    """A nose plugin to effect our exclusions of files and directories.
    """
    name = 'exclusions'
    score = 3000  # Should come before any other plugins

    def __init__(self, exclude_patterns=None):
        """
        Parameters
        ----------

        exclude_patterns : sequence of strings, optional
            Filenames containing these patterns (as raw strings, not as regular
            expressions) are excluded from the tests.
        """
        self.exclude_patterns = exclude_patterns or []
        super(ExclusionPlugin, self).__init__()

    def options(self, parser, env=os.environ):
        Plugin.options(self, parser, env)

    def configure(self, options, config):
        Plugin.configure(self, options, config)
        # Override nose trying to disable plugin.
        self.enabled = True

    def _excluded(self, path):
        # True when any exclusion pattern appears as a substring of the path.
        return any(pat in path for pat in self.exclude_patterns)

    def wantFile(self, filename):
        """Return whether the given filename should be scanned for tests.
        """
        # False vetoes the file; None defers to nose's default selection.
        return False if self._excluded(filename) else None

    def wantDirectory(self, directory):
        """Return whether the given directory should be scanned for tests.
        """
        return False if self._excluded(directory) else None
355 358
356 359
class StreamCapturer(Thread):
    """Background thread that drains registered file descriptors into a buffer.

    Subprocess stdout pipes are registered with `add_stream`; their output
    accumulates in `buffer` so it can be attached to failing-test reports.
    """
    # Class-level default; becomes an instance attribute once running.
    started = False

    def __init__(self):
        super(StreamCapturer, self).__init__()
        # Don't keep the interpreter alive if finalize() is never reached.
        self.daemon = True
        self.streams = []           # fds currently being captured
        self.buffer = BytesIO()     # accumulated output, guarded by buffer_lock
        self.streams_lock = Lock()  # guards self.streams
        self.buffer_lock = Lock()   # guards self.buffer
        self.stream_added = Event() # signals run() that a stream arrived
        self.stop = Event()         # signals run() to exit

    def run(self):
        self.started = True
        while not self.stop.is_set():
            with self.streams_lock:
                # Snapshot under the lock: the original passed the live list
                # to select(), so remove_stream() could mutate it concurrently.
                streams = list(self.streams)

            if not streams:
                # Nothing to watch yet; block until a stream is registered.
                self.stream_added.wait(timeout=1)
                self.stream_added.clear()
                continue

            ready = select(streams, [], [], 0.5)[0]
            with self.buffer_lock:
                for fd in ready:
                    self.buffer.write(os.read(fd, 1024))

    def add_stream(self, fd):
        """Register file descriptor `fd` for capture."""
        with self.streams_lock:
            self.streams.append(fd)
        self.stream_added.set()

    def remove_stream(self, fd):
        """Stop capturing file descriptor `fd`."""
        with self.streams_lock:
            self.streams.remove(fd)

    def reset_buffer(self):
        """Discard all captured output."""
        with self.buffer_lock:
            self.buffer.truncate(0)
            self.buffer.seek(0)

    def get_buffer(self):
        """Return the bytes captured so far."""
        with self.buffer_lock:
            return self.buffer.getvalue()

    def ensure_started(self):
        """Start the capture thread if it isn't running yet (idempotent).

        The flag is set *before* Thread.start(): the original set it inside
        run(), leaving a window in which two quick ensure_started() calls
        both saw started == False and the second start() raised RuntimeError.
        """
        if not self.started:
            self.started = True
            self.start()
408
class SubprocessStreamCapturePlugin(Plugin):
    """Nose plugin that attaches captured subprocess output to failures."""
    name='subprocstreams'

    def __init__(self):
        Plugin.__init__(self)
        self.stream_capturer = StreamCapturer()
        # This is ugly, but distant parts of the test machinery need to be able
        # to add streams, so we make the object globally accessible.
        nose.ipy_stream_capturer = self.stream_capturer

    def configure(self, options, config):
        Plugin.configure(self, options, config)
        # Override nose trying to disable plugin.
        self.enabled = True

    def startTest(self, test):
        # Reset log capture
        self.stream_capturer.reset_buffer()

    def formatFailure(self, test, err):
        # Append any captured subprocess output to the failure's description.
        ec, ev, tb = err
        captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
        if not captured.strip():
            return err
        annotated = '\n'.join([
            safe_str(ev),
            '>> begin captured subprocess output <<',
            captured,
            '>> end captured subprocess output <<',
        ])
        return ec, annotated, tb

    # Errors get the same treatment as assertion failures.
    formatError = formatFailure

    def finalize(self, result):
        if self.stream_capturer.started:
            self.stream_capturer.stop.set()
            self.stream_capturer.join()
447
def run_iptest():
    """Run the IPython test suite using nose.

    This function is called when this script is **not** called with the form
    `iptest all`. It simply calls nose with appropriate command line flags
    and accepts all of the standard nose arguments.
    """
    # Apply our monkeypatch to Xunit; orig_addError marks an already-patched class.
    if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
        monkeypatch_xunit()

    warnings.filterwarnings('ignore',
        'This will be removed soon. Use IPython.testing.util instead')

    # Map the first CLI argument to a known test section (accepting either
    # the short name or the 'IPython.'-prefixed form), rewriting sys.argv
    # so nose sees the section's include list instead.
    arg1 = sys.argv[1]
    if arg1 in test_sections:
        section = test_sections[arg1]
        sys.argv[1:2] = section.includes
    elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
        section = test_sections[arg1[8:]]
        sys.argv[1:2] = section.includes
    else:
        section = TestSection(arg1, includes=[arg1])


    argv = sys.argv + [ '--detailed-errors',  # extra info in tracebacks

                        '--with-ipdoctest',
                        '--ipdoctest-tests','--ipdoctest-extension=txt',

                        # We add --exe because of setuptools' imbecility (it
                        # blindly does chmod +x on ALL files). Nose does the
                        # right thing and it tries to avoid executables,
                        # setuptools unfortunately forces our hand here. This
                        # has been discussed on the distutils list and the
                        # setuptools devs refuse to fix this problem!
                        '--exe',
                        ]
    if '-a' not in argv and '-A' not in argv:
        argv = argv + ['-a', '!crash']

    # NOTE(review): lexicographic string comparison of versions is fragile in
    # general ('0.9' > '0.11'), though it holds for nose's release history.
    if nose.__version__ >= '0.11':
        # I don't fully understand why we need this one, but depending on what
        # directory the test suite is run from, if we don't give it, 0 tests
        # get run. Specifically, if the test suite is run from the source dir
        # with an argument (like 'iptest.py IPython.core', 0 tests are run,
        # even if the same call done in this directory works fine). It appears
        # that if the requested package is in the current dir, nose bails early
        # by default. Since it's otherwise harmless, leave it in by default
        # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
        argv.append('--traverse-namespace')

    # use our plugin for doctesting. It will remove the standard doctest plugin
    # if it finds it enabled
    plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(),
               SubprocessStreamCapturePlugin() ]

    # Use working directory set by parent process (see iptestcontroller)
    if 'IPTEST_WORKING_DIR' in os.environ:
        os.chdir(os.environ['IPTEST_WORKING_DIR'])

    # We need a global ipython running in this process, but the special
    # in-process group spawns its own IPython kernels, so for *that* group we
    # must avoid also opening the global one (otherwise there's a conflict of
    # singletons). Ultimately the solution to this problem is to refactor our
    # assumptions about what needs to be a singleton and what doesn't (app
    # objects should, individual shells shouldn't). But for now, this
    # workaround allows the test suite for the inprocess module to complete.
    if 'kernel.inprocess' not in section.name:
        from IPython.testing import globalipapp
        globalipapp.start_ipython()

    # Now nose can run
    TestProgram(argv=argv, addplugins=plugins)
430 522
# Allow running this module directly, equivalent to the `iptest` entry point.
if __name__ == '__main__':
    run_iptest()
433 525
General Comments 0
You need to be logged in to leave comments. Login now