Simplify StreamCapturer for subprocess testing...
Thomas Kluyver
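The diff below replaces StreamCapturer's per-fd bookkeeping (a streams list, a streams_lock, and a stream_added Event) with a single os.pipe(): subprocesses launched by the tests write straight into the pipe's write end, and the capture thread only ever selects on the read end. The following is a minimal, self-contained sketch of that pattern, not the IPython code itself; the PipeCapturer name, the final drain, and the echo example are illustrative only and are not part of this commit.

import os
from io import BytesIO
from select import select
from subprocess import Popen, STDOUT
from threading import Event, Lock, Thread

class PipeCapturer(Thread):
    """Collect output from any subprocess started with stdout=self.writefd."""
    def __init__(self):
        super(PipeCapturer, self).__init__()
        self.readfd, self.writefd = os.pipe()  # one shared pipe for all subprocesses
        self.buffer = BytesIO()
        self.buffer_lock = Lock()
        self.stop = Event()

    def run(self):
        while not self.stop.is_set():
            # Wait up to 1s for data on the single read end; no per-stream list to maintain.
            if select([self.readfd], [], [], 1)[0]:
                with self.buffer_lock:
                    self.buffer.write(os.read(self.readfd, 1024))
        # Drain anything still buffered before closing the pipe ends.
        while select([self.readfd], [], [], 0)[0]:
            with self.buffer_lock:
                self.buffer.write(os.read(self.readfd, 1024))
        os.close(self.readfd)
        os.close(self.writefd)

    def get_buffer(self):
        with self.buffer_lock:
            return self.buffer.getvalue()

# Example use (POSIX): the child inherits writefd, the thread drains readfd.
cap = PipeCapturer()
cap.start()
p = Popen(['echo', 'hello from a subprocess'], stdout=cap.writefd, stderr=STDOUT)
p.wait()
cap.stop.set()
cap.join()
print(cap.get_buffer())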
@@ -1,176 +1,175 @@
1 1 """utilities for testing IPython kernels"""
2 2
3 3 #-------------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-------------------------------------------------------------------------------
9 9
10 10 #-------------------------------------------------------------------------------
11 11 # Imports
12 12 #-------------------------------------------------------------------------------
13 13
14 14 import atexit
15 15
16 16 from contextlib import contextmanager
17 17 from subprocess import PIPE, STDOUT
18 18 try:
19 19 from queue import Empty # Py 3
20 20 except ImportError:
21 21 from Queue import Empty # Py 2
22 22
23 23 import nose
24 24 import nose.tools as nt
25 25
26 26 from IPython.kernel import KernelManager
27 27
28 28 #-------------------------------------------------------------------------------
29 29 # Globals
30 30 #-------------------------------------------------------------------------------
31 31
32 32 STARTUP_TIMEOUT = 60
33 33 TIMEOUT = 15
34 34
35 35 KM = None
36 36 KC = None
37 37
38 38 #-------------------------------------------------------------------------------
39 39 # code
40 40 #-------------------------------------------------------------------------------
41 41
42 42
43 43 def start_new_kernel(argv=None):
44 44 """start a new kernel, and return its Manager and Client"""
45 45 km = KernelManager()
46 kwargs = dict(stdout=PIPE, stderr=STDOUT)
46 kwargs = dict(stdout=nose.ipy_stream_capturer.writefd, stderr=STDOUT)
47 47 if argv:
48 48 kwargs['extra_arguments'] = argv
49 49 km.start_kernel(**kwargs)
50 nose.ipy_stream_capturer.add_stream(km.kernel.stdout.fileno())
51 50 nose.ipy_stream_capturer.ensure_started()
52 51 kc = km.client()
53 52 kc.start_channels()
54 53
55 54 msg_id = kc.kernel_info()
56 55 kc.get_shell_msg(block=True, timeout=STARTUP_TIMEOUT)
57 56 flush_channels(kc)
58 57 return km, kc
59 58
60 59 def flush_channels(kc=None):
61 60 """flush any messages waiting on the queue"""
62 61 from .test_message_spec import validate_message
63 62
64 63 if kc is None:
65 64 kc = KC
66 65 for channel in (kc.shell_channel, kc.iopub_channel):
67 66 while True:
68 67 try:
69 68 msg = channel.get_msg(block=True, timeout=0.1)
70 69 except Empty:
71 70 break
72 71 else:
73 72 validate_message(msg)
74 73
75 74
76 75 def execute(code='', kc=None, **kwargs):
77 76 """wrapper for doing common steps for validating an execution request"""
78 77 from .test_message_spec import validate_message
79 78 if kc is None:
80 79 kc = KC
81 80 msg_id = kc.execute(code=code, **kwargs)
82 81 reply = kc.get_shell_msg(timeout=TIMEOUT)
83 82 validate_message(reply, 'execute_reply', msg_id)
84 83 busy = kc.get_iopub_msg(timeout=TIMEOUT)
85 84 validate_message(busy, 'status', msg_id)
86 85 nt.assert_equal(busy['content']['execution_state'], 'busy')
87 86
88 87 if not kwargs.get('silent'):
89 88 pyin = kc.get_iopub_msg(timeout=TIMEOUT)
90 89 validate_message(pyin, 'pyin', msg_id)
91 90 nt.assert_equal(pyin['content']['code'], code)
92 91
93 92 return msg_id, reply['content']
94 93
95 94 def start_global_kernel():
96 95 """start the global kernel (if it isn't running) and return its client"""
97 96 global KM, KC
98 97 if KM is None:
99 98 KM, KC = start_new_kernel()
100 99 atexit.register(stop_global_kernel)
101 100 return KC
102 101
103 102 @contextmanager
104 103 def kernel():
105 104 """Context manager for the global kernel instance
106 105
107 106 Should be used for most kernel tests
108 107
109 108 Returns
110 109 -------
111 110 kernel_client: connected KernelClient instance
112 111 """
113 112 yield start_global_kernel()
114 113
115 114 def uses_kernel(test_f):
116 115 """Decorator for tests that use the global kernel"""
117 116 def wrapped_test():
118 117 with kernel() as kc:
119 118 test_f(kc)
120 119 wrapped_test.__doc__ = test_f.__doc__
121 120 wrapped_test.__name__ = test_f.__name__
122 121 return wrapped_test
123 122
124 123 def stop_global_kernel():
125 124 """Stop the global shared kernel instance, if it exists"""
126 125 global KM, KC
127 126 KC.stop_channels()
128 127 KC = None
129 128 if KM is None:
130 129 return
131 130 KM.shutdown_kernel(now=True)
132 131 KM = None
133 132
134 133 @contextmanager
135 134 def new_kernel(argv=None):
136 135 """Context manager for a new kernel in a subprocess
137 136
138 137 Should only be used for tests where the kernel must not be re-used.
139 138
140 139 Returns
141 140 -------
142 141 kernel_client: connected KernelClient instance
143 142 """
144 143 km, kc = start_new_kernel(argv)
145 144 try:
146 145 yield kc
147 146 finally:
148 147 kc.stop_channels()
149 148 km.shutdown_kernel(now=True)
150 149
151 150
152 151 def assemble_output(iopub):
153 152 """assemble stdout/err from an execution"""
154 153 stdout = ''
155 154 stderr = ''
156 155 while True:
157 156 msg = iopub.get_msg(block=True, timeout=1)
158 157 msg_type = msg['msg_type']
159 158 content = msg['content']
160 159 if msg_type == 'status' and content['execution_state'] == 'idle':
161 160 # idle message signals end of output
162 161 break
163 162 elif msg['msg_type'] == 'stream':
164 163 if content['name'] == 'stdout':
165 164 stdout += content['data']
166 165 elif content['name'] == 'stderr':
167 166 stderr += content['data']
168 167 else:
169 168 raise KeyError("bad stream: %r" % content['name'])
170 169 else:
171 170 # other output, ignored
172 171 pass
173 172 return stdout, stderr
174 173
175 174
176 175
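For context, a hypothetical kernel test built on the helpers above might look like the sketch below. The test name and assertions are illustrative and not part of this changeset; the import path is assumed from the file's own relative imports (IPython.kernel.tests).

import nose.tools as nt
# Path assumed from the module shown above (IPython/kernel/tests/utils.py).
from IPython.kernel.tests.utils import new_kernel, execute, assemble_output

def test_subprocess_print():
    # new_kernel gives a throwaway kernel; execute runs code and validates
    # the execute_reply; assemble_output collects the stream messages that follow.
    with new_kernel() as kc:
        msg_id, reply = execute(kc=kc, code="print('hi')")
        nt.assert_equal(reply['status'], 'ok')
        stdout, stderr = assemble_output(kc.iopub_channel)
        nt.assert_equal(stdout, 'hi\n')
        nt.assert_equal(stderr, '')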
@@ -1,133 +1,131 @@
1 1 """toplevel setup/teardown for parallel tests."""
2 2 from __future__ import print_function
3 3
4 4 #-------------------------------------------------------------------------------
5 5 # Copyright (C) 2011 The IPython Development Team
6 6 #
7 7 # Distributed under the terms of the BSD License. The full license is in
8 8 # the file COPYING, distributed as part of this software.
9 9 #-------------------------------------------------------------------------------
10 10
11 11 #-------------------------------------------------------------------------------
12 12 # Imports
13 13 #-------------------------------------------------------------------------------
14 14
15 15 import os
16 16 import tempfile
17 17 import time
18 18 from subprocess import Popen, PIPE, STDOUT
19 19
20 20 import nose
21 21
22 22 from IPython.utils.path import get_ipython_dir
23 23 from IPython.parallel import Client
24 24 from IPython.parallel.apps.launcher import (LocalProcessLauncher,
25 25 ipengine_cmd_argv,
26 26 ipcontroller_cmd_argv,
27 27 SIGKILL,
28 28 ProcessStateError,
29 29 )
30 30
31 31 # globals
32 32 launchers = []
33 33 blackhole = open(os.devnull, 'w')
34 34
35 35 # Launcher class
36 36 class TestProcessLauncher(LocalProcessLauncher):
37 37 """subclass LocalProcessLauncher, to prevent extra sockets and threads being created on Windows"""
38 38 def start(self):
39 39 if self.state == 'before':
40 40 self.process = Popen(self.args,
41 stdout=PIPE, stderr=STDOUT,
41 stdout=nose.ipy_stream_capturer.writefd, stderr=STDOUT,
42 42 env=os.environ,
43 43 cwd=self.work_dir
44 44 )
45 45 self.notify_start(self.process.pid)
46 46 self.poll = self.process.poll
47 47 # Store stdout & stderr to show with failing tests.
48 48 # This is defined in IPython.testing.iptest
49 nose.ipy_stream_capturer.add_stream(self.process.stdout.fileno())
50 49 nose.ipy_stream_capturer.ensure_started()
51 50 else:
52 51 s = 'The process was already started and has state: %r' % self.state
53 52 raise ProcessStateError(s)
54 53
55 54 # nose setup/teardown
56 55
57 56 def setup():
58 57 cluster_dir = os.path.join(get_ipython_dir(), 'profile_iptest')
59 58 engine_json = os.path.join(cluster_dir, 'security', 'ipcontroller-engine.json')
60 59 client_json = os.path.join(cluster_dir, 'security', 'ipcontroller-client.json')
61 60 for json in (engine_json, client_json):
62 61 if os.path.exists(json):
63 62 os.remove(json)
64 63
65 64 cp = TestProcessLauncher()
66 65 cp.cmd_and_args = ipcontroller_cmd_argv + \
67 66 ['--profile=iptest', '--log-level=20', '--ping=250', '--dictdb']
68 67 cp.start()
69 68 launchers.append(cp)
70 69 tic = time.time()
71 70 while not os.path.exists(engine_json) or not os.path.exists(client_json):
72 71 if cp.poll() is not None:
73 72 raise RuntimeError("The test controller exited with status %s" % cp.poll())
74 73 elif time.time()-tic > 15:
75 74 raise RuntimeError("Timeout waiting for the test controller to start.")
76 75 time.sleep(0.1)
77 76 add_engines(1)
78 77
79 78 def add_engines(n=1, profile='iptest', total=False):
80 79 """add a number of engines to a given profile.
81 80
82 81 If total is True, then already running engines are counted, and only
83 82 the additional engines necessary (if any) are started.
84 83 """
85 84 rc = Client(profile=profile)
86 85 base = len(rc)
87 86
88 87 if total:
89 88 n = max(n - base, 0)
90 89
91 90 eps = []
92 91 for i in range(n):
93 92 ep = TestProcessLauncher()
94 93 ep.cmd_and_args = ipengine_cmd_argv + [
95 94 '--profile=%s' % profile,
96 95 '--log-level=50',
97 96 '--InteractiveShell.colors=nocolor'
98 97 ]
99 98 ep.start()
100 99 launchers.append(ep)
101 100 eps.append(ep)
102 101 tic = time.time()
103 102 while len(rc) < base+n:
104 103 if any([ ep.poll() is not None for ep in eps ]):
105 104 raise RuntimeError("A test engine failed to start.")
106 105 elif time.time()-tic > 15:
107 106 raise RuntimeError("Timeout waiting for engines to connect.")
108 107 time.sleep(.1)
109 108 rc.spin()
110 109 rc.close()
111 110 return eps
112 111
113 112 def teardown():
114 113 time.sleep(1)
115 114 while launchers:
116 115 p = launchers.pop()
117 nose.ipy_stream_capturer.remove_stream(p.process.stdout.fileno())
118 116 if p.poll() is None:
119 117 try:
120 118 p.stop()
121 119 except Exception as e:
122 120 print(e)
123 121 pass
124 122 if p.poll() is None:
125 123 time.sleep(.25)
126 124 if p.poll() is None:
127 125 try:
128 126 print('cleaning up test process...')
129 127 p.signal(SIGKILL)
130 128 except:
131 129 print("couldn't shutdown process: ", p)
132 130 blackhole.close()
133 131
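Both files above now follow the same two-step pattern against the capturer that the plugin in the next file installs as nose.ipy_stream_capturer: point the child's stdout at the capturer's writefd, then call ensure_started(). A hypothetical helper doing exactly that (the launch_captured name is illustrative, not part of this commit):

import nose
from subprocess import Popen, STDOUT

def launch_captured(cmd, **popen_kwargs):
    """Start cmd with its output folded into the per-test subprocess capture."""
    capturer = nose.ipy_stream_capturer  # installed by SubprocessStreamCapturePlugin
    capturer.ensure_started()            # lazily start the reader thread
    return Popen(cmd, stdout=capturer.writefd, stderr=STDOUT, **popen_kwargs)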
@@ -1,539 +1,514 @@
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Test Suite Runner.
3 3
4 4 This module provides a main entry point to a user script to test IPython
5 5 itself from the command line. There are two ways of running this script:
6 6
7 7 1. With the syntax `iptest all`. This runs our entire test suite by
8 8 calling this script (with different arguments) recursively. This
9 9    causes modules and packages to be tested in different processes, using nose
10 10 or trial where appropriate.
11 11 2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
12 12 the script simply calls nose, but with special command line flags and
13 13 plugins loaded.
14 14
15 15 """
16 16
17 17 #-----------------------------------------------------------------------------
18 18 # Copyright (C) 2009-2011 The IPython Development Team
19 19 #
20 20 # Distributed under the terms of the BSD License. The full license is in
21 21 # the file COPYING, distributed as part of this software.
22 22 #-----------------------------------------------------------------------------
23 23
24 24 #-----------------------------------------------------------------------------
25 25 # Imports
26 26 #-----------------------------------------------------------------------------
27 27 from __future__ import print_function
28 28
29 29 # Stdlib
30 30 import glob
31 31 from io import BytesIO
32 32 import os
33 33 import os.path as path
34 34 from select import select
35 35 import sys
36 36 from threading import Thread, Lock, Event
37 37 import warnings
38 38
39 39 # Now, proceed to import nose itself
40 40 import nose.plugins.builtin
41 41 from nose.plugins.xunit import Xunit
42 42 from nose import SkipTest
43 43 from nose.core import TestProgram
44 44 from nose.plugins import Plugin
45 45 from nose.util import safe_str
46 46
47 47 # Our own imports
48 48 from IPython.utils.process import is_cmd_found
49 49 from IPython.utils.importstring import import_item
50 50 from IPython.testing.plugin.ipdoctest import IPythonDoctest
51 51 from IPython.external.decorators import KnownFailure, knownfailureif
52 52
53 53 pjoin = path.join
54 54
55 55
56 56 #-----------------------------------------------------------------------------
57 57 # Globals
58 58 #-----------------------------------------------------------------------------
59 59
60 60
61 61 #-----------------------------------------------------------------------------
62 62 # Warnings control
63 63 #-----------------------------------------------------------------------------
64 64
65 65 # Twisted generates annoying warnings with Python 2.6, as does other code
66 66 # that imports 'sets' as of today
67 67 warnings.filterwarnings('ignore', 'the sets module is deprecated',
68 68 DeprecationWarning )
69 69
70 70 # This one also comes from Twisted
71 71 warnings.filterwarnings('ignore', 'the sha module is deprecated',
72 72 DeprecationWarning)
73 73
74 74 # Wx on Fedora11 spits these out
75 75 warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
76 76 UserWarning)
77 77
78 78 # ------------------------------------------------------------------------------
79 79 # Monkeypatch Xunit to count known failures as skipped.
80 80 # ------------------------------------------------------------------------------
81 81 def monkeypatch_xunit():
82 82 try:
83 83 knownfailureif(True)(lambda: None)()
84 84 except Exception as e:
85 85 KnownFailureTest = type(e)
86 86
87 87 def addError(self, test, err, capt=None):
88 88 if issubclass(err[0], KnownFailureTest):
89 89 err = (SkipTest,) + err[1:]
90 90 return self.orig_addError(test, err, capt)
91 91
92 92 Xunit.orig_addError = Xunit.addError
93 93 Xunit.addError = addError
94 94
95 95 #-----------------------------------------------------------------------------
96 96 # Check which dependencies are installed and greater than minimum version.
97 97 #-----------------------------------------------------------------------------
98 98 def extract_version(mod):
99 99 return mod.__version__
100 100
101 101 def test_for(item, min_version=None, callback=extract_version):
102 102 """Test to see if item is importable, and optionally check against a minimum
103 103 version.
104 104
105 105 If min_version is given, the default behavior is to check against the
106 106 `__version__` attribute of the item, but specifying `callback` allows you to
107 107 extract the value you are interested in. e.g::
108 108
109 109 In [1]: import sys
110 110
111 111 In [2]: from IPython.testing.iptest import test_for
112 112
113 113 In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
114 114 Out[3]: True
115 115
116 116 """
117 117 try:
118 118 check = import_item(item)
119 119 except (ImportError, RuntimeError):
120 120 # GTK reports Runtime error if it can't be initialized even if it's
121 121 # importable.
122 122 return False
123 123 else:
124 124 if min_version:
125 125 if callback:
126 126 # extra processing step to get version to compare
127 127 check = callback(check)
128 128
129 129 return check >= min_version
130 130 else:
131 131 return True
132 132
133 133 # Global dict where we can store information on what we have and what we don't
134 134 # have available at test run time
135 135 have = {}
136 136
137 137 have['curses'] = test_for('_curses')
138 138 have['matplotlib'] = test_for('matplotlib')
139 139 have['numpy'] = test_for('numpy')
140 140 have['pexpect'] = test_for('IPython.external.pexpect')
141 141 have['pymongo'] = test_for('pymongo')
142 142 have['pygments'] = test_for('pygments')
143 143 have['qt'] = test_for('IPython.external.qt')
144 144 have['rpy2'] = test_for('rpy2')
145 145 have['sqlite3'] = test_for('sqlite3')
146 146 have['cython'] = test_for('Cython')
147 147 have['oct2py'] = test_for('oct2py')
148 148 have['tornado'] = test_for('tornado.version_info', (3,1,0), callback=None)
149 149 have['jinja2'] = test_for('jinja2')
150 150 have['wx'] = test_for('wx')
151 151 have['wx.aui'] = test_for('wx.aui')
152 152 have['azure'] = test_for('azure')
153 153 have['requests'] = test_for('requests')
154 154 have['sphinx'] = test_for('sphinx')
155 155 have['casperjs'] = is_cmd_found('casperjs')
156 156
157 157 min_zmq = (2,1,11)
158 158
159 159 have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())
160 160
161 161 #-----------------------------------------------------------------------------
162 162 # Test suite definitions
163 163 #-----------------------------------------------------------------------------
164 164
165 165 test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
166 166 'extensions', 'lib', 'terminal', 'testing', 'utils',
167 167 'nbformat', 'qt', 'html', 'nbconvert'
168 168 ]
169 169
170 170 class TestSection(object):
171 171 def __init__(self, name, includes):
172 172 self.name = name
173 173 self.includes = includes
174 174 self.excludes = []
175 175 self.dependencies = []
176 176 self.enabled = True
177 177
178 178 def exclude(self, module):
179 179 if not module.startswith('IPython'):
180 180 module = self.includes[0] + "." + module
181 181 self.excludes.append(module.replace('.', os.sep))
182 182
183 183 def requires(self, *packages):
184 184 self.dependencies.extend(packages)
185 185
186 186 @property
187 187 def will_run(self):
188 188 return self.enabled and all(have[p] for p in self.dependencies)
189 189
190 190 # Name -> (include, exclude, dependencies_met)
191 191 test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
192 192
193 193 # Exclusions and dependencies
194 194 # ---------------------------
195 195
196 196 # core:
197 197 sec = test_sections['core']
198 198 if not have['sqlite3']:
199 199 sec.exclude('tests.test_history')
200 200 sec.exclude('history')
201 201 if not have['matplotlib']:
202 202 sec.exclude('pylabtools'),
203 203 sec.exclude('tests.test_pylabtools')
204 204
205 205 # lib:
206 206 sec = test_sections['lib']
207 207 if not have['wx']:
208 208 sec.exclude('inputhookwx')
209 209 if not have['pexpect']:
210 210 sec.exclude('irunner')
211 211 sec.exclude('tests.test_irunner')
212 212 if not have['zmq']:
213 213 sec.exclude('kernel')
214 214 # We do this unconditionally, so that the test suite doesn't import
215 215 # gtk, changing the default encoding and masking some unicode bugs.
216 216 sec.exclude('inputhookgtk')
217 217 # Testing inputhook will need a lot of thought, to figure out
218 218 # how to have tests that don't lock up with the gui event
219 219 # loops in the picture
220 220 sec.exclude('inputhook')
221 221
222 222 # testing:
223 223 sec = test_sections['testing']
224 224 # This guy is probably attic material
225 225 sec.exclude('mkdoctests')
226 226 # These have to be skipped on win32 because they use echo, rm, cd, etc.
227 227 # See ticket https://github.com/ipython/ipython/issues/87
228 228 if sys.platform == 'win32':
229 229 sec.exclude('plugin.test_exampleip')
230 230 sec.exclude('plugin.dtexample')
231 231
232 232 # terminal:
233 233 if (not have['pexpect']) or (not have['zmq']):
234 234 test_sections['terminal'].exclude('console')
235 235
236 236 # parallel
237 237 sec = test_sections['parallel']
238 238 sec.requires('zmq')
239 239 if not have['pymongo']:
240 240 sec.exclude('controller.mongodb')
241 241 sec.exclude('tests.test_mongodb')
242 242
243 243 # kernel:
244 244 sec = test_sections['kernel']
245 245 sec.requires('zmq')
246 246 # The in-process kernel tests are done in a separate section
247 247 sec.exclude('inprocess')
248 248 # importing gtk sets the default encoding, which we want to avoid
249 249 sec.exclude('zmq.gui.gtkembed')
250 250 if not have['matplotlib']:
251 251 sec.exclude('zmq.pylab')
252 252
253 253 # kernel.inprocess:
254 254 test_sections['kernel.inprocess'].requires('zmq')
255 255
256 256 # extensions:
257 257 sec = test_sections['extensions']
258 258 if not have['cython']:
259 259 sec.exclude('cythonmagic')
260 260 sec.exclude('tests.test_cythonmagic')
261 261 if not have['oct2py']:
262 262 sec.exclude('octavemagic')
263 263 sec.exclude('tests.test_octavemagic')
264 264 if not have['rpy2'] or not have['numpy']:
265 265 sec.exclude('rmagic')
266 266 sec.exclude('tests.test_rmagic')
267 267 # autoreload does some strange stuff, so move it to its own test section
268 268 sec.exclude('autoreload')
269 269 sec.exclude('tests.test_autoreload')
270 270 test_sections['autoreload'] = TestSection('autoreload',
271 271 ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
272 272 test_group_names.append('autoreload')
273 273
274 274 # qt:
275 275 test_sections['qt'].requires('zmq', 'qt', 'pygments')
276 276
277 277 # html:
278 278 sec = test_sections['html']
279 279 sec.requires('zmq', 'tornado', 'requests')
280 280 # The notebook 'static' directory contains JS, css and other
281 281 # files for web serving. Occasionally projects may put a .py
282 282 # file in there (MathJax ships a conf.py), so we might as
283 283 # well play it safe and skip the whole thing.
284 284 sec.exclude('static')
285 285 sec.exclude('fabfile')
286 286 if not have['jinja2']:
287 287 sec.exclude('notebookapp')
288 288 if not have['azure']:
289 289 sec.exclude('services.notebooks.azurenbmanager')
290 290
291 291 # config:
292 292 # Config files aren't really importable stand-alone
293 293 test_sections['config'].exclude('profile')
294 294
295 295 # nbconvert:
296 296 sec = test_sections['nbconvert']
297 297 sec.requires('pygments', 'jinja2', 'sphinx')
298 298 # Exclude nbconvert directories containing config files used to test.
299 299 # Executing the config files with iptest would cause an exception.
300 300 sec.exclude('tests.files')
301 301 sec.exclude('exporters.tests.files')
302 302 if not have['tornado']:
303 303 sec.exclude('nbconvert.post_processors.serve')
304 304 sec.exclude('nbconvert.post_processors.tests.test_serve')
305 305
306 306 #-----------------------------------------------------------------------------
307 307 # Functions and classes
308 308 #-----------------------------------------------------------------------------
309 309
310 310 def check_exclusions_exist():
311 311 from IPython.utils.path import get_ipython_package_dir
312 312 from IPython.utils.warn import warn
313 313 parent = os.path.dirname(get_ipython_package_dir())
314 314     for sec in test_sections.values():
315 315         for pattern in sec.excludes:
316 316 fullpath = pjoin(parent, pattern)
317 317 if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
318 318 warn("Excluding nonexistent file: %r" % pattern)
319 319
320 320
321 321 class ExclusionPlugin(Plugin):
322 322 """A nose plugin to effect our exclusions of files and directories.
323 323 """
324 324 name = 'exclusions'
325 325 score = 3000 # Should come before any other plugins
326 326
327 327 def __init__(self, exclude_patterns=None):
328 328 """
329 329 Parameters
330 330 ----------
331 331
332 332 exclude_patterns : sequence of strings, optional
333 333 Filenames containing these patterns (as raw strings, not as regular
334 334 expressions) are excluded from the tests.
335 335 """
336 336 self.exclude_patterns = exclude_patterns or []
337 337 super(ExclusionPlugin, self).__init__()
338 338
339 339 def options(self, parser, env=os.environ):
340 340 Plugin.options(self, parser, env)
341 341
342 342 def configure(self, options, config):
343 343 Plugin.configure(self, options, config)
344 344 # Override nose trying to disable plugin.
345 345 self.enabled = True
346 346
347 347 def wantFile(self, filename):
348 348 """Return whether the given filename should be scanned for tests.
349 349 """
350 350 if any(pat in filename for pat in self.exclude_patterns):
351 351 return False
352 352 return None
353 353
354 354 def wantDirectory(self, directory):
355 355 """Return whether the given directory should be scanned for tests.
356 356 """
357 357 if any(pat in directory for pat in self.exclude_patterns):
358 358 return False
359 359 return None
360 360
361 361
362 362 class StreamCapturer(Thread):
363 363 started = False
364 364 def __init__(self):
365 365 super(StreamCapturer, self).__init__()
366 366 self.streams = []
367 367 self.buffer = BytesIO()
368 self.streams_lock = Lock()
368 self.readfd, self.writefd = os.pipe()
369 369 self.buffer_lock = Lock()
370 self.stream_added = Event()
371 370 self.stop = Event()
372 371
373 372 def run(self):
374 373 self.started = True
375 while not self.stop.is_set():
376 with self.streams_lock:
377 streams = self.streams
378 374
379 if not streams:
380 self.stream_added.wait(timeout=1)
381 self.stream_added.clear()
382 continue
375 while not self.stop.is_set():
376 ready = select([self.readfd], [], [], 1)[0]
383 377
384 ready = select(streams, [], [], 0.5)[0]
385 dead = []
378 if ready:
386 379 with self.buffer_lock:
387 for fd in ready:
388 try:
389 self.buffer.write(os.read(fd, 1024))
390 except OSError as e:
391 import errno
392 if e.errno == errno.EBADF:
393 dead.append(fd)
394 else:
395 raise
396
397 with self.streams_lock:
398 for fd in dead:
399 self.streams.remove(fd)
400
401 def add_stream(self, fd):
402 with self.streams_lock:
403 self.streams.append(fd)
404 self.stream_added.set()
380 self.buffer.write(os.read(self.readfd, 1024))
405 381
406 def remove_stream(self, fd):
407 with self.streams_lock:
408 self.streams.remove(fd)
382 os.close(self.readfd)
383 os.close(self.writefd)
409 384
410 385 def reset_buffer(self):
411 386 with self.buffer_lock:
412 387 self.buffer.truncate(0)
413 388 self.buffer.seek(0)
414 389
415 390 def get_buffer(self):
416 391 with self.buffer_lock:
417 392 return self.buffer.getvalue()
418 393
419 394 def ensure_started(self):
420 395 if not self.started:
421 396 self.start()
422 397
423 398 class SubprocessStreamCapturePlugin(Plugin):
424 399 name='subprocstreams'
425 400 def __init__(self):
426 401 Plugin.__init__(self)
427 402 self.stream_capturer = StreamCapturer()
428 403 # This is ugly, but distant parts of the test machinery need to be able
429 # to add streams, so we make the object globally accessible.
404 # to redirect streams, so we make the object globally accessible.
430 405 nose.ipy_stream_capturer = self.stream_capturer
431 406
432 407 def configure(self, options, config):
433 408 Plugin.configure(self, options, config)
434 409 # Override nose trying to disable plugin.
435 410 self.enabled = True
436 411
437 412 def startTest(self, test):
438 413 # Reset log capture
439 414 self.stream_capturer.reset_buffer()
440 415
441 416 def formatFailure(self, test, err):
442 417 # Show output
443 418 ec, ev, tb = err
444 419 captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
445 420 if captured.strip():
446 421 ev = safe_str(ev)
447 422 out = [ev, '>> begin captured subprocess output <<',
448 423 captured,
449 424 '>> end captured subprocess output <<']
450 425 return ec, '\n'.join(out), tb
451 426
452 427 return err
453 428
454 429 formatError = formatFailure
455 430
456 431 def finalize(self, result):
457 432 if self.stream_capturer.started:
458 433 self.stream_capturer.stop.set()
459 434 self.stream_capturer.join()
460 435
461 436
462 437 def run_iptest():
463 438 """Run the IPython test suite using nose.
464 439
465 440 This function is called when this script is **not** called with the form
466 441 `iptest all`. It simply calls nose with appropriate command line flags
467 442 and accepts all of the standard nose arguments.
468 443 """
469 444 # Apply our monkeypatch to Xunit
470 445 if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
471 446 monkeypatch_xunit()
472 447
473 448 warnings.filterwarnings('ignore',
474 449 'This will be removed soon. Use IPython.testing.util instead')
475 450
476 451 arg1 = sys.argv[1]
477 452 if arg1 in test_sections:
478 453 section = test_sections[arg1]
479 454 sys.argv[1:2] = section.includes
480 455 elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
481 456 section = test_sections[arg1[8:]]
482 457 sys.argv[1:2] = section.includes
483 458 else:
484 459 section = TestSection(arg1, includes=[arg1])
485 460
486 461
487 462 argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
488 463
489 464 '--with-ipdoctest',
490 465 '--ipdoctest-tests','--ipdoctest-extension=txt',
491 466
492 467 # We add --exe because of setuptools' imbecility (it
493 468 # blindly does chmod +x on ALL files). Nose does the
494 469 # right thing and it tries to avoid executables,
495 470 # setuptools unfortunately forces our hand here. This
496 471 # has been discussed on the distutils list and the
497 472 # setuptools devs refuse to fix this problem!
498 473 '--exe',
499 474 ]
500 475 if '-a' not in argv and '-A' not in argv:
501 476 argv = argv + ['-a', '!crash']
502 477
503 478 if nose.__version__ >= '0.11':
504 479 # I don't fully understand why we need this one, but depending on what
505 480 # directory the test suite is run from, if we don't give it, 0 tests
506 481 # get run. Specifically, if the test suite is run from the source dir
507 482 # with an argument (like 'iptest.py IPython.core'), 0 tests are run,
508 483 # even if the same call done from this directory works fine. It appears
509 484 # that if the requested package is in the current dir, nose bails early
510 485 # by default. Since it's otherwise harmless, leave it in by default
511 486 # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
512 487 argv.append('--traverse-namespace')
513 488
514 489 # use our plugin for doctesting. It will remove the standard doctest plugin
515 490 # if it finds it enabled
516 491 plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(),
517 492 SubprocessStreamCapturePlugin() ]
518 493
519 494 # Use working directory set by parent process (see iptestcontroller)
520 495 if 'IPTEST_WORKING_DIR' in os.environ:
521 496 os.chdir(os.environ['IPTEST_WORKING_DIR'])
522 497
523 498 # We need a global ipython running in this process, but the special
524 499 # in-process group spawns its own IPython kernels, so for *that* group we
525 500 # must avoid also opening the global one (otherwise there's a conflict of
526 501 # singletons). Ultimately the solution to this problem is to refactor our
527 502 # assumptions about what needs to be a singleton and what doesn't (app
528 503 # objects should, individual shells shouldn't). But for now, this
529 504 # workaround allows the test suite for the inprocess module to complete.
530 505 if 'kernel.inprocess' not in section.name:
531 506 from IPython.testing import globalipapp
532 507 globalipapp.start_ipython()
533 508
534 509 # Now nose can run
535 510 TestProgram(argv=argv, addplugins=plugins)
536 511
537 512 if __name__ == '__main__':
538 513 run_iptest()
539 514