@@ -1,458 +1,463 @@
 # -*- coding: utf-8 -*-
 """IPython Test Process Controller
 
 This module runs one or more subprocesses which will actually run the IPython
 test suite.
 
 """
 
 #-----------------------------------------------------------------------------
 # Copyright (C) 2009-2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------
 
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 from __future__ import print_function
 
 import argparse
 import multiprocessing.pool
 from multiprocessing import Process, Queue
 import os
 import shutil
 import signal
 import sys
 import subprocess
 import time
 
 from .iptest import have, test_group_names, test_sections
 from IPython.utils.py3compat import bytes_to_str
 from IPython.utils.sysinfo import sys_info
 from IPython.utils.tempdir import TemporaryDirectory
 
 
 class TestController(object):
     """Run tests in a subprocess
     """
     #: str, IPython test suite to be executed.
     section = None
     #: list, command line arguments to be executed
     cmd = None
     #: dict, extra environment variables to set for the subprocess
     env = None
     #: list, TemporaryDirectory instances to clear up when the process finishes
     dirs = None
     #: subprocess.Popen instance
     process = None
     #: str, process stdout+stderr
     stdout = None
     #: bool, whether to capture process stdout & stderr
     buffer_output = False
 
     def __init__(self):
         self.cmd = []
         self.env = {}
         self.dirs = []
 
 
     @property
     def will_run(self):
         try:
             return test_sections[self.section].will_run
         except KeyError:
             return True
 
     def launch(self):
         # print('*** ENV:', self.env) # dbg
         # print('*** CMD:', self.cmd) # dbg
         env = os.environ.copy()
         env.update(self.env)
         output = subprocess.PIPE if self.buffer_output else None
         stdout = subprocess.STDOUT if self.buffer_output else None
         self.process = subprocess.Popen(self.cmd, stdout=output,
                 stderr=stdout, env=env)
 
     def wait(self):
         self.stdout, _ = self.process.communicate()
         return self.process.returncode
 
     def cleanup_process(self):
         """Cleanup on exit by killing any leftover processes."""
         subp = self.process
         if subp is None or (subp.poll() is not None):
             return # Process doesn't exist, or is already dead.
 
         try:
             print('Cleaning up stale PID: %d' % subp.pid)
             subp.kill()
         except: # (OSError, WindowsError) ?
             # This is just a best effort, if we fail or the process was
             # really gone, ignore it.
             pass
         else:
             for i in range(10):
                 if subp.poll() is None:
                     time.sleep(0.1)
                 else:
                     break
 
         if subp.poll() is None:
             # The process did not die...
             print('... failed. Manual cleanup may be required.')
 
     def cleanup(self):
         "Kill process if it's still alive, and clean up temporary directories"
         self.cleanup_process()
         for td in self.dirs:
             td.cleanup()
 
     __del__ = cleanup
 
 class PyTestController(TestController):
     """Run Python tests using IPython.testing.iptest"""
     #: str, Python command to execute in subprocess
     pycmd = None
 
     def __init__(self, section):
         """Create new test runner."""
         TestController.__init__(self)
         self.section = section
         # pycmd is put into cmd[2] in PyTestController.launch()
         self.cmd = [sys.executable, '-c', None, section]
         self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
         ipydir = TemporaryDirectory()
         self.dirs.append(ipydir)
         self.env['IPYTHONDIR'] = ipydir.name
         self.workingdir = workingdir = TemporaryDirectory()
         self.dirs.append(workingdir)
         self.env['IPTEST_WORKING_DIR'] = workingdir.name
         # This means we won't get odd effects from our own matplotlib config
         self.env['MPLCONFIGDIR'] = workingdir.name
 
     def add_xunit(self):
         xunit_file = os.path.abspath(self.section + '.xunit.xml')
         self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
 
     def add_coverage(self):
         try:
             sources = test_sections[self.section].includes
         except KeyError:
             sources = ['IPython']
 
         coverage_rc = ("[run]\n"
                        "data_file = {data_file}\n"
                        "source =\n"
                        " {source}\n"
                        ).format(data_file=os.path.abspath('.coverage.'+self.section),
                                 source="\n ".join(sources))
         config_file = os.path.join(self.workingdir.name, '.coveragerc')
         with open(config_file, 'w') as f:
             f.write(coverage_rc)
 
         self.env['COVERAGE_PROCESS_START'] = config_file
         self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
 
     def launch(self):
         self.cmd[2] = self.pycmd
         super(PyTestController, self).launch()
 
 class JSController(TestController):
     """Run CasperJS tests """
     def __init__(self, section):
         """Create new test runner."""
         TestController.__init__(self)
         self.section = section
 
+        self.ipydir = TemporaryDirectory()
+        self.dirs.append(self.ipydir)
+        self.env['IPYTHONDIR'] = self.ipydir.name
+
         # start the ipython notebook, so we get the port number
         self._init_server()
 
         import IPython.html.tests as t
         test_dir = os.path.join(os.path.dirname(t.__file__), 'casperjs')
         includes = '--includes=' + os.path.join(test_dir,'util.js')
         test_cases = os.path.join(test_dir, 'test_cases')
         port = '--port=' + str(self.server_port)
         self.cmd = ['casperjs', 'test', port, includes, test_cases]
 
 
     def _init_server(self):
         "Start the notebook server in a separate process"
         self.queue = q = Queue()
-        self.server = Process(target=run_webapp, args=(q,))
-        server.start()
+        self.server = Process(target=run_webapp, args=(q, self.ipydir.name))
+        self.server.start()
         self.server_port = q.get()
 
     def cleanup(self):
         self.server.terminate()
+        self.server.join()
         TestController.cleanup(self)
 
 
-def run_webapp(q):
+def run_webapp(q, nbdir):
     """start the IPython Notebook, and pass port back to the queue"""
     import IPython.html.notebookapp as nbapp
     server = nbapp.NotebookApp()
-    server.initialize(['--no-browser'])
+    server.initialize(['--no-browser', '--notebook-dir='+nbdir])
     # communicate the port number to the parent process
     q.put(server.port)
     server.start()
 
 def prepare_controllers(options):
     """Returns two lists of TestController instances, those to run, and those
     not to run."""
     testgroups = options.testgroups
 
     if not testgroups:
         testgroups = test_group_names
         if not options.all:
             test_sections['parallel'].enabled = False
 
     c_js = [JSController(name) for name in testgroups if 'js' in name]
     c_py = [PyTestController(name) for name in testgroups if 'js' not in name]
 
     configure_py_controllers(c_py, xunit=options.xunit,
                              coverage=options.coverage)
 
     controllers = c_py + c_js
     to_run = [c for c in controllers if c.will_run]
     not_run = [c for c in controllers if not c.will_run]
     return to_run, not_run
 
 def configure_py_controllers(controllers, xunit=False, coverage=False, extra_args=()):
     """Apply options for a collection of TestController objects."""
     for controller in controllers:
         if xunit:
             controller.add_xunit()
         if coverage:
             controller.add_coverage()
         controller.cmd.extend(extra_args)
 
 def do_run(controller):
     try:
         try:
             controller.launch()
         except Exception:
             import traceback
             traceback.print_exc()
             return controller, 1 # signal failure
 
         exitcode = controller.wait()
         return controller, exitcode
 
     except KeyboardInterrupt:
         return controller, -signal.SIGINT
     finally:
         controller.cleanup()
 
 def report():
     """Return a string with a summary report of test-related variables."""
 
     out = [ sys_info(), '\n']
 
     avail = []
     not_avail = []
 
     for k, is_avail in have.items():
         if is_avail:
             avail.append(k)
         else:
             not_avail.append(k)
 
     if avail:
         out.append('\nTools and libraries available at test time:\n')
         avail.sort()
         out.append(' ' + ' '.join(avail)+'\n')
 
     if not_avail:
         out.append('\nTools and libraries NOT available at test time:\n')
         not_avail.sort()
         out.append(' ' + ' '.join(not_avail)+'\n')
 
     return ''.join(out)
 
 def run_iptestall(options):
     """Run the entire IPython test suite by calling nose and trial.
 
     This function constructs :class:`IPTester` instances for all IPython
     modules and package and then runs each of them. This causes the modules
     and packages of IPython to be tested each in their own subprocess using
     nose.
 
     Parameters
     ----------
 
     All parameters are passed as attributes of the options object.
 
     testgroups : list of str
       Run only these sections of the test suite. If empty, run all the available
       sections.
 
     fast : int or None
       Run the test suite in parallel, using n simultaneous processes. If None
       is passed, one process is used per CPU core. Default 1 (i.e. sequential)
 
     inc_slow : bool
       Include slow tests, like IPython.parallel. By default, these tests aren't
       run.
 
     xunit : bool
       Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
 
     coverage : bool or str
       Measure code coverage from tests. True will store the raw coverage data,
       or pass 'html' or 'xml' to get reports.
 
     extra_args : list
       Extra arguments to pass to the test subprocesses, e.g. '-v'
     """
     if options.fast != 1:
         # If running in parallel, capture output so it doesn't get interleaved
         TestController.buffer_output = True
 
     to_run, not_run = prepare_controllers(options)
 
     def justify(ltext, rtext, width=70, fill='-'):
         ltext += ' '
         rtext = (' ' + rtext).rjust(width - len(ltext), fill)
         return ltext + rtext
 
     # Run all test runners, tracking execution time
     failed = []
     t_start = time.time()
 
     print()
     if options.fast == 1:
         # This actually means sequential, i.e. with 1 job
         for controller in to_run:
             print('IPython test group:', controller.section)
             sys.stdout.flush() # Show in correct order when output is piped
             controller, res = do_run(controller)
             if res:
                 failed.append(controller)
                 if res == -signal.SIGINT:
                     print("Interrupted")
                     break
             print()
 
     else:
         # Run tests concurrently
         try:
             pool = multiprocessing.pool.ThreadPool(options.fast)
             for (controller, res) in pool.imap_unordered(do_run, to_run):
                 res_string = 'OK' if res == 0 else 'FAILED'
                 print(justify('IPython test group: ' + controller.section, res_string))
                 if res:
                     print(bytes_to_str(controller.stdout))
                     failed.append(controller)
                     if res == -signal.SIGINT:
                         print("Interrupted")
                         break
         except KeyboardInterrupt:
             return
 
     for controller in not_run:
         print(justify('IPython test group: ' + controller.section, 'NOT RUN'))
 
     t_end = time.time()
     t_tests = t_end - t_start
     nrunners = len(to_run)
     nfail = len(failed)
     # summarize results
     print('_'*70)
     print('Test suite completed for system with the following information:')
     print(report())
     print('Ran %s test groups in %.3fs' % (nrunners, t_tests))
     print()
     print('Status: ', end='')
     if not failed:
         print('OK')
     else:
         # If anything went wrong, point out what command to rerun manually to
         # see the actual errors and individual summary
         failed_sections = [c.section for c in failed]
         print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
                                  nrunners, ', '.join(failed_sections)))
         print()
         print('You may wish to rerun these, with:')
         print(' iptest', *failed_sections)
         print()
 
     if options.coverage:
         from coverage import coverage
         cov = coverage(data_file='.coverage')
         cov.combine()
         cov.save()
 
         # Coverage HTML report
         if options.coverage == 'html':
             html_dir = 'ipy_htmlcov'
             shutil.rmtree(html_dir, ignore_errors=True)
             print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
             sys.stdout.flush()
 
             # Custom HTML reporter to clean up module names.
             from coverage.html import HtmlReporter
             class CustomHtmlReporter(HtmlReporter):
                 def find_code_units(self, morfs):
                     super(CustomHtmlReporter, self).find_code_units(morfs)
                     for cu in self.code_units:
                         nameparts = cu.name.split(os.sep)
                         if 'IPython' not in nameparts:
                             continue
                         ix = nameparts.index('IPython')
                         cu.name = '.'.join(nameparts[ix:])
 
             # Reimplement the html_report method with our custom reporter
             cov._harvest_data()
             cov.config.from_args(omit='*%stests' % os.sep, html_dir=html_dir,
                                  html_title='IPython test coverage',
                                  )
             reporter = CustomHtmlReporter(cov, cov.config)
             reporter.report(None)
             print('done.')
 
         # Coverage XML report
         elif options.coverage == 'xml':
             cov.xml_report(outfile='ipy_coverage.xml')
 
     if failed:
         # Ensure that our exit code indicates failure
         sys.exit(1)
 
 
 def main():
     # Arguments after -- should be passed through to nose. Argparse treats
     # everything after -- as regular positional arguments, so we separate them
     # first.
     try:
         ix = sys.argv.index('--')
     except ValueError:
         to_parse = sys.argv[1:]
         extra_args = []
     else:
         to_parse = sys.argv[1:ix]
         extra_args = sys.argv[ix+1:]
 
     parser = argparse.ArgumentParser(description='Run IPython test suite')
     parser.add_argument('testgroups', nargs='*',
                         help='Run specified groups of tests. If omitted, run '
                         'all tests.')
     parser.add_argument('--all', action='store_true',
                         help='Include slow tests not run by default.')
     parser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
                         help='Run test sections in parallel.')
     parser.add_argument('--xunit', action='store_true',
                         help='Produce Xunit XML results')
     parser.add_argument('--coverage', nargs='?', const=True, default=False,
                         help="Measure test coverage. Specify 'html' or "
                         "'xml' to get reports.")
 
     options = parser.parse_args(to_parse)
     options.extra_args = extra_args
 
     run_iptestall(options)
 
 
 if __name__ == '__main__':
     main()