@@ -1,175 +1,174 @@
 """utilities for testing IPython kernels"""

 #-------------------------------------------------------------------------------
 # Copyright (C) 2013 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-------------------------------------------------------------------------------

 #-------------------------------------------------------------------------------
 # Imports
 #-------------------------------------------------------------------------------

 import atexit

 from contextlib import contextmanager
 from subprocess import PIPE, STDOUT
 try:
     from queue import Empty  # Py 3
 except ImportError:
     from Queue import Empty  # Py 2

 import nose
 import nose.tools as nt

 from IPython.kernel import KernelManager

 #-------------------------------------------------------------------------------
 # Globals
 #-------------------------------------------------------------------------------

 STARTUP_TIMEOUT = 60
 TIMEOUT = 15

 KM = None
 KC = None

 #-------------------------------------------------------------------------------
 # code
 #-------------------------------------------------------------------------------


 def start_new_kernel(argv=None):
     """start a new kernel, and return its Manager and Client"""
     km = KernelManager()
-    kwargs = dict(stdout=nose.ip
+    kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT)
     if argv:
         kwargs['extra_arguments'] = argv
     km.start_kernel(**kwargs)
-    nose.ipy_stream_capturer.ensure_started()
     kc = km.client()
     kc.start_channels()

     msg_id = kc.kernel_info()
     kc.get_shell_msg(block=True, timeout=STARTUP_TIMEOUT)
     flush_channels(kc)
     return km, kc

 def flush_channels(kc=None):
     """flush any messages waiting on the queue"""
     from .test_message_spec import validate_message

     if kc is None:
         kc = KC
     for channel in (kc.shell_channel, kc.iopub_channel):
         while True:
             try:
                 msg = channel.get_msg(block=True, timeout=0.1)
             except Empty:
                 break
             else:
                 validate_message(msg)


 def execute(code='', kc=None, **kwargs):
     """wrapper for doing common steps for validating an execution request"""
     from .test_message_spec import validate_message
     if kc is None:
         kc = KC
     msg_id = kc.execute(code=code, **kwargs)
     reply = kc.get_shell_msg(timeout=TIMEOUT)
     validate_message(reply, 'execute_reply', msg_id)
     busy = kc.get_iopub_msg(timeout=TIMEOUT)
     validate_message(busy, 'status', msg_id)
     nt.assert_equal(busy['content']['execution_state'], 'busy')

     if not kwargs.get('silent'):
         pyin = kc.get_iopub_msg(timeout=TIMEOUT)
         validate_message(pyin, 'pyin', msg_id)
         nt.assert_equal(pyin['content']['code'], code)

     return msg_id, reply['content']

 def start_global_kernel():
     """start the global kernel (if it isn't running) and return its client"""
     global KM, KC
     if KM is None:
         KM, KC = start_new_kernel()
         atexit.register(stop_global_kernel)
     return KC

 @contextmanager
 def kernel():
     """Context manager for the global kernel instance

     Should be used for most kernel tests

     Returns
     -------
     kernel_client: connected KernelClient instance
     """
     yield start_global_kernel()

 def uses_kernel(test_f):
     """Decorator for tests that use the global kernel"""
     def wrapped_test():
         with kernel() as kc:
             test_f(kc)
     wrapped_test.__doc__ = test_f.__doc__
     wrapped_test.__name__ = test_f.__name__
     return wrapped_test

 def stop_global_kernel():
     """Stop the global shared kernel instance, if it exists"""
     global KM, KC
     KC.stop_channels()
     KC = None
     if KM is None:
         return
     KM.shutdown_kernel(now=True)
     KM = None

 @contextmanager
 def new_kernel(argv=None):
     """Context manager for a new kernel in a subprocess

     Should only be used for tests where the kernel must not be re-used.

     Returns
     -------
     kernel_client: connected KernelClient instance
     """
     km, kc = start_new_kernel(argv)
     try:
         yield kc
     finally:
         kc.stop_channels()
         km.shutdown_kernel(now=True)


 def assemble_output(iopub):
     """assemble stdout/err from an execution"""
     stdout = ''
     stderr = ''
     while True:
         msg = iopub.get_msg(block=True, timeout=1)
         msg_type = msg['msg_type']
         content = msg['content']
         if msg_type == 'status' and content['execution_state'] == 'idle':
             # idle message signals end of output
             break
         elif msg['msg_type'] == 'stream':
             if content['name'] == 'stdout':
                 stdout += content['data']
             elif content['name'] == 'stderr':
                 stderr += content['data']
             else:
                 raise KeyError("bad stream: %r" % content['name'])
         else:
             # other output, ignored
             pass
     return stdout, stderr


@@ -1,131 +1,130 @@
 """toplevel setup/teardown for parallel tests."""
 from __future__ import print_function

 #-------------------------------------------------------------------------------
 # Copyright (C) 2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-------------------------------------------------------------------------------

 #-------------------------------------------------------------------------------
 # Imports
 #-------------------------------------------------------------------------------

 import os
 import tempfile
 import time
 from subprocess import Popen, PIPE, STDOUT

 import nose

 from IPython.utils.path import get_ipython_dir
 from IPython.parallel import Client
 from IPython.parallel.apps.launcher import (LocalProcessLauncher,
                                             ipengine_cmd_argv,
                                             ipcontroller_cmd_argv,
                                             SIGKILL,
                                             ProcessStateError,
 )

 # globals
 launchers = []
 blackhole = open(os.devnull, 'w')

 # Launcher class
 class TestProcessLauncher(LocalProcessLauncher):
     """subclass LocalProcessLauncher, to prevent extra sockets and threads being created on Windows"""
     def start(self):
         if self.state == 'before':
+            # Store stdout & stderr to show with failing tests.
+            # This is defined in IPython.testing.iptest
             self.process = Popen(self.args,
-                stdout=nose.ip
+                stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
                 env=os.environ,
                 cwd=self.work_dir
             )
             self.notify_start(self.process.pid)
             self.poll = self.process.poll
-            # Store stdout & stderr to show with failing tests.
-            # This is defined in IPython.testing.iptest
-            nose.ipy_stream_capturer.ensure_started()
         else:
             s = 'The process was already started and has state: %r' % self.state
             raise ProcessStateError(s)

 # nose setup/teardown

 def setup():
     cluster_dir = os.path.join(get_ipython_dir(), 'profile_iptest')
     engine_json = os.path.join(cluster_dir, 'security', 'ipcontroller-engine.json')
     client_json = os.path.join(cluster_dir, 'security', 'ipcontroller-client.json')
     for json in (engine_json, client_json):
         if os.path.exists(json):
             os.remove(json)

     cp = TestProcessLauncher()
     cp.cmd_and_args = ipcontroller_cmd_argv + \
                 ['--profile=iptest', '--log-level=20', '--ping=250', '--dictdb']
     cp.start()
     launchers.append(cp)
     tic = time.time()
     while not os.path.exists(engine_json) or not os.path.exists(client_json):
         if cp.poll() is not None:
             raise RuntimeError("The test controller exited with status %s" % cp.poll())
         elif time.time()-tic > 15:
             raise RuntimeError("Timeout waiting for the test controller to start.")
         time.sleep(0.1)
     add_engines(1)

 def add_engines(n=1, profile='iptest', total=False):
     """add a number of engines to a given profile.

     If total is True, then already running engines are counted, and only
     the additional engines necessary (if any) are started.
     """
     rc = Client(profile=profile)
     base = len(rc)

     if total:
         n = max(n - base, 0)

     eps = []
     for i in range(n):
         ep = TestProcessLauncher()
         ep.cmd_and_args = ipengine_cmd_argv + [
             '--profile=%s' % profile,
             '--log-level=50',
             '--InteractiveShell.colors=nocolor'
             ]
         ep.start()
         launchers.append(ep)
         eps.append(ep)
     tic = time.time()
     while len(rc) < base+n:
         if any([ ep.poll() is not None for ep in eps ]):
             raise RuntimeError("A test engine failed to start.")
         elif time.time()-tic > 15:
             raise RuntimeError("Timeout waiting for engines to connect.")
         time.sleep(.1)
     rc.spin()
     rc.close()
     return eps

 def teardown():
     time.sleep(1)
     while launchers:
         p = launchers.pop()
         if p.poll() is None:
             try:
                 p.stop()
             except Exception as e:
                 print(e)
                 pass
         if p.poll() is None:
             time.sleep(.25)
         if p.poll() is None:
             try:
                 print('cleaning up test process...')
                 p.signal(SIGKILL)
             except:
                 print("couldn't shutdown process: ", p)
     blackhole.close()

@@ -1,519 +1,530 @@
 # -*- coding: utf-8 -*-
 """IPython Test Suite Runner.

 This module provides a main entry point to a user script to test IPython
 itself from the command line. There are two ways of running this script:

 1. With the syntax `iptest all`. This runs our entire test suite by
    calling this script (with different arguments) recursively. This
    causes modules and package to be tested in different processes, using nose
    or trial where appropriate.
 2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
    the script simply calls nose, but with special command line flags and
    plugins loaded.

 """

 #-----------------------------------------------------------------------------
 # Copyright (C) 2009-2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 from __future__ import print_function

 # Stdlib
 import glob
 from io import BytesIO
 import os
 import os.path as path
 import sys
 from threading import Thread, Lock, Event
 import warnings

 # Now, proceed to import nose itself
 import nose.plugins.builtin
 from nose.plugins.xunit import Xunit
 from nose import SkipTest
 from nose.core import TestProgram
 from nose.plugins import Plugin
 from nose.util import safe_str

 # Our own imports
 from IPython.utils.process import is_cmd_found
 from IPython.utils.importstring import import_item
 from IPython.testing.plugin.ipdoctest import IPythonDoctest
 from IPython.external.decorators import KnownFailure, knownfailureif

 pjoin = path.join


 #-----------------------------------------------------------------------------
 # Globals
 #-----------------------------------------------------------------------------


 #-----------------------------------------------------------------------------
 # Warnings control
 #-----------------------------------------------------------------------------

 # Twisted generates annoying warnings with Python 2.6, as will do other code
 # that imports 'sets' as of today
 warnings.filterwarnings('ignore', 'the sets module is deprecated',
                         DeprecationWarning )

 # This one also comes from Twisted
 warnings.filterwarnings('ignore', 'the sha module is deprecated',
                         DeprecationWarning)

 # Wx on Fedora11 spits these out
 warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
                         UserWarning)

 # ------------------------------------------------------------------------------
 # Monkeypatch Xunit to count known failures as skipped.
 # ------------------------------------------------------------------------------
 def monkeypatch_xunit():
     try:
         knownfailureif(True)(lambda: None)()
     except Exception as e:
         KnownFailureTest = type(e)

     def addError(self, test, err, capt=None):
         if issubclass(err[0], KnownFailureTest):
             err = (SkipTest,) + err[1:]
         return self.orig_addError(test, err, capt)

     Xunit.orig_addError = Xunit.addError
     Xunit.addError = addError

 #-----------------------------------------------------------------------------
 # Check which dependencies are installed and greater than minimum version.
 #-----------------------------------------------------------------------------
 def extract_version(mod):
     return mod.__version__

 def test_for(item, min_version=None, callback=extract_version):
     """Test to see if item is importable, and optionally check against a minimum
     version.

     If min_version is given, the default behavior is to check against the
     `__version__` attribute of the item, but specifying `callback` allows you to
     extract the value you are interested in. e.g::

         In [1]: import sys

         In [2]: from IPython.testing.iptest import test_for

         In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
         Out[3]: True

     """
     try:
         check = import_item(item)
     except (ImportError, RuntimeError):
         # GTK reports Runtime error if it can't be initialized even if it's
         # importable.
         return False
     else:
         if min_version:
             if callback:
                 # extra processing step to get version to compare
                 check = callback(check)

             return check >= min_version
         else:
             return True

 # Global dict where we can store information on what we have and what we don't
 # have available at test run time
 have = {}

 have['curses'] = test_for('_curses')
 have['matplotlib'] = test_for('matplotlib')
 have['numpy'] = test_for('numpy')
 have['pexpect'] = test_for('IPython.external.pexpect')
 have['pymongo'] = test_for('pymongo')
 have['pygments'] = test_for('pygments')
 have['qt'] = test_for('IPython.external.qt')
 have['rpy2'] = test_for('rpy2')
 have['sqlite3'] = test_for('sqlite3')
 have['cython'] = test_for('Cython')
 have['oct2py'] = test_for('oct2py')
 have['tornado'] = test_for('tornado.version_info', (3,1,0), callback=None)
 have['jinja2'] = test_for('jinja2')
 have['azure'] = test_for('azure')
 have['requests'] = test_for('requests')
 have['sphinx'] = test_for('sphinx')
 have['casperjs'] = is_cmd_found('casperjs')

 min_zmq = (2,1,11)

 have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())

 #-----------------------------------------------------------------------------
 # Test suite definitions
 #-----------------------------------------------------------------------------

 test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
                     'extensions', 'lib', 'terminal', 'testing', 'utils',
                     'nbformat', 'qt', 'html', 'nbconvert'
                    ]

 class TestSection(object):
     def __init__(self, name, includes):
         self.name = name
         self.includes = includes
         self.excludes = []
         self.dependencies = []
         self.enabled = True

     def exclude(self, module):
         if not module.startswith('IPython'):
             module = self.includes[0] + "." + module
         self.excludes.append(module.replace('.', os.sep))

     def requires(self, *packages):
         self.dependencies.extend(packages)

     @property
     def will_run(self):
         return self.enabled and all(have[p] for p in self.dependencies)

 # Name -> (include, exclude, dependencies_met)
 test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}

 # Exclusions and dependencies
 # ---------------------------

 # core:
 sec = test_sections['core']
 if not have['sqlite3']:
     sec.exclude('tests.test_history')
     sec.exclude('history')
 if not have['matplotlib']:
     sec.exclude('pylabtools'),
     sec.exclude('tests.test_pylabtools')

 # lib:
 sec = test_sections['lib']
 if not have['pexpect']:
     sec.exclude('irunner')
     sec.exclude('tests.test_irunner')
 if not have['zmq']:
     sec.exclude('kernel')
 # We do this unconditionally, so that the test suite doesn't import
 # gtk, changing the default encoding and masking some unicode bugs.
 sec.exclude('inputhookgtk')
 # We also do this unconditionally, because wx can interfere with Unix signals.
 # There are currently no tests for it anyway.
 sec.exclude('inputhookwx')
 # Testing inputhook will need a lot of thought, to figure out
 # how to have tests that don't lock up with the gui event
 # loops in the picture
 sec.exclude('inputhook')

 # testing:
 sec = test_sections['testing']
 # This guy is probably attic material
 sec.exclude('mkdoctests')
 # These have to be skipped on win32 because they use echo, rm, cd, etc.
 # See ticket https://github.com/ipython/ipython/issues/87
 if sys.platform == 'win32':
     sec.exclude('plugin.test_exampleip')
     sec.exclude('plugin.dtexample')

 # terminal:
 if (not have['pexpect']) or (not have['zmq']):
     test_sections['terminal'].exclude('console')

 # parallel
 sec = test_sections['parallel']
 sec.requires('zmq')
 if not have['pymongo']:
     sec.exclude('controller.mongodb')
     sec.exclude('tests.test_mongodb')

 # kernel:
 sec = test_sections['kernel']
 sec.requires('zmq')
 # The in-process kernel tests are done in a separate section
 sec.exclude('inprocess')
 # importing gtk sets the default encoding, which we want to avoid
 sec.exclude('zmq.gui.gtkembed')
 if not have['matplotlib']:
     sec.exclude('zmq.pylab')

 # kernel.inprocess:
 test_sections['kernel.inprocess'].requires('zmq')

 # extensions:
 sec = test_sections['extensions']
 if not have['cython']:
     sec.exclude('cythonmagic')
     sec.exclude('tests.test_cythonmagic')
 if not have['oct2py']:
     sec.exclude('octavemagic')
     sec.exclude('tests.test_octavemagic')
 if not have['rpy2'] or not have['numpy']:
     sec.exclude('rmagic')
     sec.exclude('tests.test_rmagic')
 # autoreload does some strange stuff, so move it to its own test section
 sec.exclude('autoreload')
 sec.exclude('tests.test_autoreload')
 test_sections['autoreload'] = TestSection('autoreload',
     ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
 test_group_names.append('autoreload')

 # qt:
 test_sections['qt'].requires('zmq', 'qt', 'pygments')

 # html:
 sec = test_sections['html']
 sec.requires('zmq', 'tornado', 'requests')
 # The notebook 'static' directory contains JS, css and other
 # files for web serving. Occasionally projects may put a .py
 # file in there (MathJax ships a conf.py), so we might as
 # well play it safe and skip the whole thing.
 sec.exclude('static')
 sec.exclude('fabfile')
 if not have['jinja2']:
     sec.exclude('notebookapp')
 if not have['azure']:
     sec.exclude('services.notebooks.azurenbmanager')

 # config:
 # Config files aren't really importable stand-alone
 test_sections['config'].exclude('profile')

 # nbconvert:
 sec = test_sections['nbconvert']
 sec.requires('pygments', 'jinja2', 'sphinx')
 # Exclude nbconvert directories containing config files used to test.
 # Executing the config files with iptest would cause an exception.
 sec.exclude('tests.files')
 sec.exclude('exporters.tests.files')
 if not have['tornado']:
     sec.exclude('nbconvert.post_processors.serve')
     sec.exclude('nbconvert.post_processors.tests.test_serve')

 #-----------------------------------------------------------------------------
 # Functions and classes
 #-----------------------------------------------------------------------------

 def check_exclusions_exist():
     from IPython.utils.path import get_ipython_package_dir
     from IPython.utils.warn import warn
     parent = os.path.dirname(get_ipython_package_dir())
     for sec in test_sections:
         for pattern in sec.exclusions:
             fullpath = pjoin(parent, pattern)
             if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
                 warn("Excluding nonexistent file: %r" % pattern)


 class ExclusionPlugin(Plugin):
     """A nose plugin to effect our exclusions of files and directories.
     """
     name = 'exclusions'
     score = 3000  # Should come before any other plugins

     def __init__(self, exclude_patterns=None):
         """
         Parameters
         ----------

         exclude_patterns : sequence of strings, optional
           Filenames containing these patterns (as raw strings, not as regular
           expressions) are excluded from the tests.
         """
         self.exclude_patterns = exclude_patterns or []
         super(ExclusionPlugin, self).__init__()

     def options(self, parser, env=os.environ):
         Plugin.options(self, parser, env)

     def configure(self, options, config):
         Plugin.configure(self, options, config)
         # Override nose trying to disable plugin.
         self.enabled = True

     def wantFile(self, filename):
         """Return whether the given filename should be scanned for tests.
         """
         if any(pat in filename for pat in self.exclude_patterns):
             return False
         return None

     def wantDirectory(self, directory):
         """Return whether the given directory should be scanned for tests.
         """
         if any(pat in directory for pat in self.exclude_patterns):
             return False
         return None


 class StreamCapturer(Thread):
     daemon = True  # Don't hang if main thread crashes
     started = False
     def __init__(self):
         super(StreamCapturer, self).__init__()
         self.streams = []
         self.buffer = BytesIO()
         self.readfd, self.writefd = os.pipe()
         self.buffer_lock = Lock()
         self.stop = Event()

     def run(self):
         self.started = True

         while not self.stop.is_set():
             chunk = os.read(self.readfd, 1024)

             with self.buffer_lock:
                 self.buffer.write(chunk)

         os.close(self.readfd)
         os.close(self.writefd)

     def reset_buffer(self):
         with self.buffer_lock:
             self.buffer.truncate(0)
             self.buffer.seek(0)

     def get_buffer(self):
         with self.buffer_lock:
             return self.buffer.getvalue()

     def ensure_started(self):
         if not self.started:
             self.start()

     def halt(self):
         """Safely stop the thread."""
         if not self.started:
             return

         self.stop.set()
         os.write(self.writefd, b'wake up')  # Ensure we're not locked in a read()
         self.join()

 class SubprocessStreamCapturePlugin(Plugin):
     name='subprocstreams'
     def __init__(self):
         Plugin.__init__(self)
         self.stream_capturer = StreamCapturer()
+        self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
         # This is ugly, but distant parts of the test machinery need to be able
         # to redirect streams, so we make the object globally accessible.
-        nose.ip
+        nose.iptest_stdstreams_fileno = self.get_write_fileno
+
+    def get_write_fileno(self):
+        if self.destination == 'capture':
+            self.stream_capturer.ensure_started()
+            return self.stream_capturer.writefd
+        elif self.destination == 'discard':
+            return os.open(os.devnull, os.O_WRONLY)
+        else:
+            return sys.__stdout__.fileno()

     def configure(self, options, config):
         Plugin.configure(self, options, config)
         # Override nose trying to disable plugin.
+        if self.destination == 'capture':
             self.enabled = True

     def startTest(self, test):
         # Reset log capture
         self.stream_capturer.reset_buffer()

     def formatFailure(self, test, err):
         # Show output
         ec, ev, tb = err
         captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
         if captured.strip():
             ev = safe_str(ev)
             out = [ev, '>> begin captured subprocess output <<',
                    captured,
                    '>> end captured subprocess output <<']
             return ec, '\n'.join(out), tb

         return err

     formatError = formatFailure

     def finalize(self, result):
         self.stream_capturer.halt()


 def run_iptest():
     """Run the IPython test suite using nose.

     This function is called when this script is **not** called with the form
     `iptest all`. It simply calls nose with appropriate command line flags
     and accepts all of the standard nose arguments.
     """
     # Apply our monkeypatch to Xunit
     if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
         monkeypatch_xunit()

     warnings.filterwarnings('ignore',
         'This will be removed soon. Use IPython.testing.util instead')

     arg1 = sys.argv[1]
     if arg1 in test_sections:
         section = test_sections[arg1]
         sys.argv[1:2] = section.includes
     elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
         section = test_sections[arg1[8:]]
         sys.argv[1:2] = section.includes
     else:
         section = TestSection(arg1, includes=[arg1])


     argv = sys.argv + [ '--detailed-errors',  # extra info in tracebacks

                         '--with-ipdoctest',
                         '--ipdoctest-tests','--ipdoctest-extension=txt',

                         # We add --exe because of setuptools' imbecility (it
                         # blindly does chmod +x on ALL files). Nose does the
                         # right thing and it tries to avoid executables,
                         # setuptools unfortunately forces our hand here. This
                         # has been discussed on the distutils list and the
                         # setuptools devs refuse to fix this problem!
                         '--exe',
                         ]
     if '-a' not in argv and '-A' not in argv:
         argv = argv + ['-a', '!crash']

     if nose.__version__ >= '0.11':
         # I don't fully understand why we need this one, but depending on what
         # directory the test suite is run from, if we don't give it, 0 tests
         # get run. Specifically, if the test suite is run from the source dir
487 | # with an argument (like 'iptest.py IPython.core', 0 tests are run, |
|
498 | # with an argument (like 'iptest.py IPython.core', 0 tests are run, | |
488 | # even if the same call done in this directory works fine). It appears |
|
499 | # even if the same call done in this directory works fine). It appears | |
489 | # that if the requested package is in the current dir, nose bails early |
|
500 | # that if the requested package is in the current dir, nose bails early | |
490 | # by default. Since it's otherwise harmless, leave it in by default |
|
501 | # by default. Since it's otherwise harmless, leave it in by default | |
491 | # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. |
|
502 | # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. | |
492 | argv.append('--traverse-namespace') |
|
503 | argv.append('--traverse-namespace') | |
493 |
|
504 | |||
494 | # use our plugin for doctesting. It will remove the standard doctest plugin |
|
505 | # use our plugin for doctesting. It will remove the standard doctest plugin | |
495 | # if it finds it enabled |
|
506 | # if it finds it enabled | |
496 | plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(), |
|
507 | plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(), | |
497 | SubprocessStreamCapturePlugin() ] |
|
508 | SubprocessStreamCapturePlugin() ] | |
498 |
|
509 | |||
499 | # Use working directory set by parent process (see iptestcontroller) |
|
510 | # Use working directory set by parent process (see iptestcontroller) | |
500 | if 'IPTEST_WORKING_DIR' in os.environ: |
|
511 | if 'IPTEST_WORKING_DIR' in os.environ: | |
501 | os.chdir(os.environ['IPTEST_WORKING_DIR']) |
|
512 | os.chdir(os.environ['IPTEST_WORKING_DIR']) | |
502 |
|
513 | |||
503 | # We need a global ipython running in this process, but the special |
|
514 | # We need a global ipython running in this process, but the special | |
504 | # in-process group spawns its own IPython kernels, so for *that* group we |
|
515 | # in-process group spawns its own IPython kernels, so for *that* group we | |
505 | # must avoid also opening the global one (otherwise there's a conflict of |
|
516 | # must avoid also opening the global one (otherwise there's a conflict of | |
506 | # singletons). Ultimately the solution to this problem is to refactor our |
|
517 | # singletons). Ultimately the solution to this problem is to refactor our | |
507 | # assumptions about what needs to be a singleton and what doesn't (app |
|
518 | # assumptions about what needs to be a singleton and what doesn't (app | |
508 | # objects should, individual shells shouldn't). But for now, this |
|
519 | # objects should, individual shells shouldn't). But for now, this | |
509 | # workaround allows the test suite for the inprocess module to complete. |
|
520 | # workaround allows the test suite for the inprocess module to complete. | |
510 | if 'kernel.inprocess' not in section.name: |
|
521 | if 'kernel.inprocess' not in section.name: | |
511 | from IPython.testing import globalipapp |
|
522 | from IPython.testing import globalipapp | |
512 | globalipapp.start_ipython() |
|
523 | globalipapp.start_ipython() | |
513 |
|
524 | |||
514 | # Now nose can run |
|
525 | # Now nose can run | |
515 | TestProgram(argv=argv, addplugins=plugins) |
|
526 | TestProgram(argv=argv, addplugins=plugins) | |
516 |
|
527 | |||
517 | if __name__ == '__main__': |
|
528 | if __name__ == '__main__': | |
518 | run_iptest() |
|
529 | run_iptest() | |
519 |
|
530 |
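The `get_write_fileno` hook added above routes subprocess output according to the `IPTEST_SUBPROC_STREAMS` destination read in the plugin's `__init__`. A minimal standalone sketch of that routing, for illustration only (the helper name and the `capture_writefd` argument are assumptions, not part of the patch):

    import os
    import sys

    def pick_subproc_fd(destination, capture_writefd):
        """Illustrative: choose a file descriptor for subprocess output.

        Mirrors the three cases the plugin handles: 'capture' -> the
        StreamCapturer's write end, 'discard' -> /dev/null, anything
        else (e.g. 'show') -> the real stdout of the test runner.
        """
        if destination == 'capture':
            return capture_writefd          # write end of the capture pipe
        elif destination == 'discard':
            return os.open(os.devnull, os.O_WRONLY)
        else:
            return sys.__stdout__.fileno()
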
@@ -1,501 +1,507 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """IPython Test Process Controller |
|
2 | """IPython Test Process Controller | |
3 |
|
3 | |||
4 | This module runs one or more subprocesses which will actually run the IPython |
|
4 | This module runs one or more subprocesses which will actually run the IPython | |
5 | test suite. |
|
5 | test suite. | |
6 |
|
6 | |||
7 | """ |
|
7 | """ | |
8 |
|
8 | |||
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 | # Copyright (C) 2009-2011 The IPython Development Team |
|
10 | # Copyright (C) 2009-2011 The IPython Development Team | |
11 | # |
|
11 | # | |
12 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | # Distributed under the terms of the BSD License. The full license is in | |
13 | # the file COPYING, distributed as part of this software. |
|
13 | # the file COPYING, distributed as part of this software. | |
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 |
|
15 | |||
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
17 | # Imports |
|
17 | # Imports | |
18 | #----------------------------------------------------------------------------- |
|
18 | #----------------------------------------------------------------------------- | |
19 | from __future__ import print_function |
|
19 | from __future__ import print_function | |
20 |
|
20 | |||
21 | import argparse |
|
21 | import argparse | |
22 | import multiprocessing.pool |
|
22 | import multiprocessing.pool | |
23 | from multiprocessing import Process, Queue |
|
23 | from multiprocessing import Process, Queue | |
24 | import os |
|
24 | import os | |
25 | import shutil |
|
25 | import shutil | |
26 | import signal |
|
26 | import signal | |
27 | import sys |
|
27 | import sys | |
28 | import subprocess |
|
28 | import subprocess | |
29 | import time |
|
29 | import time | |
30 |
|
30 | |||
31 | from .iptest import have, test_group_names as py_test_group_names, test_sections |
|
31 | from .iptest import have, test_group_names as py_test_group_names, test_sections | |
32 | from IPython.utils.path import compress_user |
|
32 | from IPython.utils.path import compress_user | |
33 | from IPython.utils.py3compat import bytes_to_str |
|
33 | from IPython.utils.py3compat import bytes_to_str | |
34 | from IPython.utils.sysinfo import get_sys_info |
|
34 | from IPython.utils.sysinfo import get_sys_info | |
35 | from IPython.utils.tempdir import TemporaryDirectory |
|
35 | from IPython.utils.tempdir import TemporaryDirectory | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | class TestController(object): |
|
38 | class TestController(object): | |
39 | """Run tests in a subprocess |
|
39 | """Run tests in a subprocess | |
40 | """ |
|
40 | """ | |
41 | #: str, IPython test suite to be executed. |
|
41 | #: str, IPython test suite to be executed. | |
42 | section = None |
|
42 | section = None | |
43 | #: list, command line arguments to be executed |
|
43 | #: list, command line arguments to be executed | |
44 | cmd = None |
|
44 | cmd = None | |
45 | #: dict, extra environment variables to set for the subprocess |
|
45 | #: dict, extra environment variables to set for the subprocess | |
46 | env = None |
|
46 | env = None | |
47 | #: list, TemporaryDirectory instances to clear up when the process finishes |
|
47 | #: list, TemporaryDirectory instances to clear up when the process finishes | |
48 | dirs = None |
|
48 | dirs = None | |
49 | #: subprocess.Popen instance |
|
49 | #: subprocess.Popen instance | |
50 | process = None |
|
50 | process = None | |
51 | #: str, process stdout+stderr |
|
51 | #: str, process stdout+stderr | |
52 | stdout = None |
|
52 | stdout = None | |
53 | #: bool, whether to capture process stdout & stderr |
|
53 | #: bool, whether to capture process stdout & stderr | |
54 | buffer_output = False |
|
54 | buffer_output = False | |
55 |
|
55 | |||
56 | def __init__(self): |
|
56 | def __init__(self): | |
57 | self.cmd = [] |
|
57 | self.cmd = [] | |
58 | self.env = {} |
|
58 | self.env = {} | |
59 | self.dirs = [] |
|
59 | self.dirs = [] | |
60 |
|
60 | |||
61 | def launch(self): |
|
61 | def launch(self): | |
62 | # print('*** ENV:', self.env) # dbg |
|
62 | # print('*** ENV:', self.env) # dbg | |
63 | # print('*** CMD:', self.cmd) # dbg |
|
63 | # print('*** CMD:', self.cmd) # dbg | |
64 | env = os.environ.copy() |
|
64 | env = os.environ.copy() | |
65 | env.update(self.env) |
|
65 | env.update(self.env) | |
66 | output = subprocess.PIPE if self.buffer_output else None |
|
66 | output = subprocess.PIPE if self.buffer_output else None | |
67 | stdout = subprocess.STDOUT if self.buffer_output else None |
|
67 | stdout = subprocess.STDOUT if self.buffer_output else None | |
68 | self.process = subprocess.Popen(self.cmd, stdout=output, |
|
68 | self.process = subprocess.Popen(self.cmd, stdout=output, | |
69 | stderr=stdout, env=env) |
|
69 | stderr=stdout, env=env) | |
70 |
|
70 | |||
71 | def wait(self): |
|
71 | def wait(self): | |
72 | self.stdout, _ = self.process.communicate() |
|
72 | self.stdout, _ = self.process.communicate() | |
73 | return self.process.returncode |
|
73 | return self.process.returncode | |
74 |
|
74 | |||
75 | def cleanup_process(self): |
|
75 | def cleanup_process(self): | |
76 | """Cleanup on exit by killing any leftover processes.""" |
|
76 | """Cleanup on exit by killing any leftover processes.""" | |
77 | subp = self.process |
|
77 | subp = self.process | |
78 | if subp is None or (subp.poll() is not None): |
|
78 | if subp is None or (subp.poll() is not None): | |
79 | return # Process doesn't exist, or is already dead. |
|
79 | return # Process doesn't exist, or is already dead. | |
80 |
|
80 | |||
81 | try: |
|
81 | try: | |
82 | print('Cleaning up stale PID: %d' % subp.pid) |
|
82 | print('Cleaning up stale PID: %d' % subp.pid) | |
83 | subp.kill() |
|
83 | subp.kill() | |
84 | except: # (OSError, WindowsError) ? |
|
84 | except: # (OSError, WindowsError) ? | |
85 | # This is just a best effort, if we fail or the process was |
|
85 | # This is just a best effort, if we fail or the process was | |
86 | # really gone, ignore it. |
|
86 | # really gone, ignore it. | |
87 | pass |
|
87 | pass | |
88 | else: |
|
88 | else: | |
89 | for i in range(10): |
|
89 | for i in range(10): | |
90 | if subp.poll() is None: |
|
90 | if subp.poll() is None: | |
91 | time.sleep(0.1) |
|
91 | time.sleep(0.1) | |
92 | else: |
|
92 | else: | |
93 | break |
|
93 | break | |
94 |
|
94 | |||
95 | if subp.poll() is None: |
|
95 | if subp.poll() is None: | |
96 | # The process did not die... |
|
96 | # The process did not die... | |
97 | print('... failed. Manual cleanup may be required.') |
|
97 | print('... failed. Manual cleanup may be required.') | |
98 |
|
98 | |||
99 | def cleanup(self): |
|
99 | def cleanup(self): | |
100 | "Kill process if it's still alive, and clean up temporary directories" |
|
100 | "Kill process if it's still alive, and clean up temporary directories" | |
101 | self.cleanup_process() |
|
101 | self.cleanup_process() | |
102 | for td in self.dirs: |
|
102 | for td in self.dirs: | |
103 | td.cleanup() |
|
103 | td.cleanup() | |
104 |
|
104 | |||
105 | __del__ = cleanup |
|
105 | __del__ = cleanup | |
106 |
|
106 | |||
107 | class PyTestController(TestController): |
|
107 | class PyTestController(TestController): | |
108 | """Run Python tests using IPython.testing.iptest""" |
|
108 | """Run Python tests using IPython.testing.iptest""" | |
109 | #: str, Python command to execute in subprocess |
|
109 | #: str, Python command to execute in subprocess | |
110 | pycmd = None |
|
110 | pycmd = None | |
111 |
|
111 | |||
112 | def __init__(self, section): |
|
112 | def __init__(self, section): | |
113 | """Create new test runner.""" |
|
113 | """Create new test runner.""" | |
114 | TestController.__init__(self) |
|
114 | TestController.__init__(self) | |
115 | self.section = section |
|
115 | self.section = section | |
116 | # pycmd is put into cmd[2] in PyTestController.launch() |
|
116 | # pycmd is put into cmd[2] in PyTestController.launch() | |
117 | self.cmd = [sys.executable, '-c', None, section] |
|
117 | self.cmd = [sys.executable, '-c', None, section] | |
118 | self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()" |
|
118 | self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()" | |
119 | ipydir = TemporaryDirectory() |
|
119 | ipydir = TemporaryDirectory() | |
120 | self.dirs.append(ipydir) |
|
120 | self.dirs.append(ipydir) | |
121 | self.env['IPYTHONDIR'] = ipydir.name |
|
121 | self.env['IPYTHONDIR'] = ipydir.name | |
122 | self.workingdir = workingdir = TemporaryDirectory() |
|
122 | self.workingdir = workingdir = TemporaryDirectory() | |
123 | self.dirs.append(workingdir) |
|
123 | self.dirs.append(workingdir) | |
124 | self.env['IPTEST_WORKING_DIR'] = workingdir.name |
|
124 | self.env['IPTEST_WORKING_DIR'] = workingdir.name | |
125 | # This means we won't get odd effects from our own matplotlib config |
|
125 | # This means we won't get odd effects from our own matplotlib config | |
126 | self.env['MPLCONFIGDIR'] = workingdir.name |
|
126 | self.env['MPLCONFIGDIR'] = workingdir.name | |
127 |
|
127 | |||
128 | @property |
|
128 | @property | |
129 | def will_run(self): |
|
129 | def will_run(self): | |
130 | try: |
|
130 | try: | |
131 | return test_sections[self.section].will_run |
|
131 | return test_sections[self.section].will_run | |
132 | except KeyError: |
|
132 | except KeyError: | |
133 | return True |
|
133 | return True | |
134 |
|
134 | |||
135 | def add_xunit(self): |
|
135 | def add_xunit(self): | |
136 | xunit_file = os.path.abspath(self.section + '.xunit.xml') |
|
136 | xunit_file = os.path.abspath(self.section + '.xunit.xml') | |
137 | self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file]) |
|
137 | self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file]) | |
138 |
|
138 | |||
139 | def add_coverage(self): |
|
139 | def add_coverage(self): | |
140 | try: |
|
140 | try: | |
141 | sources = test_sections[self.section].includes |
|
141 | sources = test_sections[self.section].includes | |
142 | except KeyError: |
|
142 | except KeyError: | |
143 | sources = ['IPython'] |
|
143 | sources = ['IPython'] | |
144 |
|
144 | |||
145 | coverage_rc = ("[run]\n" |
|
145 | coverage_rc = ("[run]\n" | |
146 | "data_file = {data_file}\n" |
|
146 | "data_file = {data_file}\n" | |
147 | "source =\n" |
|
147 | "source =\n" | |
148 | " {source}\n" |
|
148 | " {source}\n" | |
149 | ).format(data_file=os.path.abspath('.coverage.'+self.section), |
|
149 | ).format(data_file=os.path.abspath('.coverage.'+self.section), | |
150 | source="\n ".join(sources)) |
|
150 | source="\n ".join(sources)) | |
151 | config_file = os.path.join(self.workingdir.name, '.coveragerc') |
|
151 | config_file = os.path.join(self.workingdir.name, '.coveragerc') | |
152 | with open(config_file, 'w') as f: |
|
152 | with open(config_file, 'w') as f: | |
153 | f.write(coverage_rc) |
|
153 | f.write(coverage_rc) | |
154 |
|
154 | |||
155 | self.env['COVERAGE_PROCESS_START'] = config_file |
|
155 | self.env['COVERAGE_PROCESS_START'] = config_file | |
156 | self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd |
|
156 | self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd | |
157 |
|
157 | |||
158 | def launch(self): |
|
158 | def launch(self): | |
159 | self.cmd[2] = self.pycmd |
|
159 | self.cmd[2] = self.pycmd | |
160 | super(PyTestController, self).launch() |
|
160 | super(PyTestController, self).launch() | |
161 |
|
161 | |||
162 | class JSController(TestController): |
|
162 | class JSController(TestController): | |
163 | """Run CasperJS tests """ |
|
163 | """Run CasperJS tests """ | |
164 | def __init__(self, section): |
|
164 | def __init__(self, section): | |
165 | """Create new test runner.""" |
|
165 | """Create new test runner.""" | |
166 | TestController.__init__(self) |
|
166 | TestController.__init__(self) | |
167 | self.section = section |
|
167 | self.section = section | |
168 |
|
168 | |||
169 | self.ipydir = TemporaryDirectory() |
|
169 | self.ipydir = TemporaryDirectory() | |
170 | self.dirs.append(self.ipydir) |
|
170 | self.dirs.append(self.ipydir) | |
171 | self.env['IPYTHONDIR'] = self.ipydir.name |
|
171 | self.env['IPYTHONDIR'] = self.ipydir.name | |
172 |
|
172 | |||
173 | def launch(self): |
|
173 | def launch(self): | |
174 | # start the ipython notebook, so we get the port number |
|
174 | # start the ipython notebook, so we get the port number | |
175 | self._init_server() |
|
175 | self._init_server() | |
176 |
|
176 | |||
177 | import IPython.html.tests as t |
|
177 | import IPython.html.tests as t | |
178 | test_dir = os.path.join(os.path.dirname(t.__file__), 'casperjs') |
|
178 | test_dir = os.path.join(os.path.dirname(t.__file__), 'casperjs') | |
179 | includes = '--includes=' + os.path.join(test_dir,'util.js') |
|
179 | includes = '--includes=' + os.path.join(test_dir,'util.js') | |
180 | test_cases = os.path.join(test_dir, 'test_cases') |
|
180 | test_cases = os.path.join(test_dir, 'test_cases') | |
181 | port = '--port=' + str(self.server_port) |
|
181 | port = '--port=' + str(self.server_port) | |
182 | self.cmd = ['casperjs', 'test', port, includes, test_cases] |
|
182 | self.cmd = ['casperjs', 'test', port, includes, test_cases] | |
183 |
|
183 | |||
184 | super(JSController, self).launch() |
|
184 | super(JSController, self).launch() | |
185 |
|
185 | |||
186 | @property |
|
186 | @property | |
187 | def will_run(self): |
|
187 | def will_run(self): | |
188 | return all(have[a] for a in ['zmq', 'tornado', 'jinja2', 'casperjs']) |
|
188 | return all(have[a] for a in ['zmq', 'tornado', 'jinja2', 'casperjs']) | |
189 |
|
189 | |||
190 | def _init_server(self): |
|
190 | def _init_server(self): | |
191 | "Start the notebook server in a separate process" |
|
191 | "Start the notebook server in a separate process" | |
192 | self.queue = q = Queue() |
|
192 | self.queue = q = Queue() | |
193 | self.server = Process(target=run_webapp, args=(q, self.ipydir.name)) |
|
193 | self.server = Process(target=run_webapp, args=(q, self.ipydir.name)) | |
194 | self.server.start() |
|
194 | self.server.start() | |
195 | self.server_port = q.get() |
|
195 | self.server_port = q.get() | |
196 |
|
196 | |||
197 | def cleanup(self): |
|
197 | def cleanup(self): | |
198 | self.server.terminate() |
|
198 | self.server.terminate() | |
199 | self.server.join() |
|
199 | self.server.join() | |
200 | TestController.cleanup(self) |
|
200 | TestController.cleanup(self) | |
201 |
|
201 | |||
202 | js_test_group_names = {'js'} |
|
202 | js_test_group_names = {'js'} | |
203 |
|
203 | |||
204 | def run_webapp(q, nbdir, loglevel=0): |
|
204 | def run_webapp(q, nbdir, loglevel=0): | |
205 | """start the IPython Notebook, and pass port back to the queue""" |
|
205 | """start the IPython Notebook, and pass port back to the queue""" | |
206 | import os |
|
206 | import os | |
207 | import IPython.html.notebookapp as nbapp |
|
207 | import IPython.html.notebookapp as nbapp | |
208 | import sys |
|
208 | import sys | |
209 | sys.stderr = open(os.devnull, 'w') |
|
209 | sys.stderr = open(os.devnull, 'w') | |
210 | os.environ["IPYTHONDIR"] = nbdir |
|
210 | os.environ["IPYTHONDIR"] = nbdir | |
211 | server = nbapp.NotebookApp() |
|
211 | server = nbapp.NotebookApp() | |
212 | args = ['--no-browser'] |
|
212 | args = ['--no-browser'] | |
213 | args.append('--notebook-dir='+nbdir) |
|
213 | args.append('--notebook-dir='+nbdir) | |
214 | args.append('--profile-dir='+nbdir) |
|
214 | args.append('--profile-dir='+nbdir) | |
215 | args.append('--log-level='+str(loglevel)) |
|
215 | args.append('--log-level='+str(loglevel)) | |
216 | server.initialize(args) |
|
216 | server.initialize(args) | |
217 | # communicate the port number to the parent process |
|
217 | # communicate the port number to the parent process | |
218 | q.put(server.port) |
|
218 | q.put(server.port) | |
219 | server.start() |
|
219 | server.start() | |
220 |
|
220 | |||
221 | def prepare_controllers(options): |
|
221 | def prepare_controllers(options): | |
222 | """Returns two lists of TestController instances, those to run, and those |
|
222 | """Returns two lists of TestController instances, those to run, and those | |
223 | not to run.""" |
|
223 | not to run.""" | |
224 | testgroups = options.testgroups |
|
224 | testgroups = options.testgroups | |
225 |
|
225 | |||
226 | if testgroups: |
|
226 | if testgroups: | |
227 | py_testgroups = [g for g in testgroups if (g in py_test_group_names) \ |
|
227 | py_testgroups = [g for g in testgroups if (g in py_test_group_names) \ | |
228 | or g.startswith('IPython')] |
|
228 | or g.startswith('IPython')] | |
229 | js_testgroups = [g for g in testgroups if g in js_test_group_names] |
|
229 | js_testgroups = [g for g in testgroups if g in js_test_group_names] | |
230 | else: |
|
230 | else: | |
231 | py_testgroups = py_test_group_names |
|
231 | py_testgroups = py_test_group_names | |
232 | js_testgroups = js_test_group_names |
|
232 | js_testgroups = js_test_group_names | |
233 | if not options.all: |
|
233 | if not options.all: | |
234 | test_sections['parallel'].enabled = False |
|
234 | test_sections['parallel'].enabled = False | |
235 |
|
235 | |||
236 | c_js = [JSController(name) for name in js_testgroups] |
|
236 | c_js = [JSController(name) for name in js_testgroups] | |
237 | c_py = [PyTestController(name) for name in py_testgroups] |
|
237 | c_py = [PyTestController(name) for name in py_testgroups] | |
238 |
|
238 | |||
239 | configure_py_controllers(c_py, xunit=options.xunit, |
|
239 | configure_py_controllers(c_py, xunit=options.xunit, | |
240 | coverage=options.coverage, |
|
240 | coverage=options.coverage, subproc_streams=options.subproc_streams, | |
|
241 | extra_args=options.extra_args) | |||
241 |
|
242 | |||
242 | controllers = c_py + c_js |
|
243 | controllers = c_py + c_js | |
243 | to_run = [c for c in controllers if c.will_run] |
|
244 | to_run = [c for c in controllers if c.will_run] | |
244 | not_run = [c for c in controllers if not c.will_run] |
|
245 | not_run = [c for c in controllers if not c.will_run] | |
245 | return to_run, not_run |
|
246 | return to_run, not_run | |
246 |
|
247 | |||
247 | def configure_py_controllers(controllers, xunit=False, coverage=False, |
|
248 | def configure_py_controllers(controllers, xunit=False, coverage=False, | |
|
249 | subproc_streams='capture', extra_args=()): | |||
248 | """Apply options for a collection of TestController objects.""" |
|
250 | """Apply options for a collection of TestController objects.""" | |
249 | for controller in controllers: |
|
251 | for controller in controllers: | |
250 | if xunit: |
|
252 | if xunit: | |
251 | controller.add_xunit() |
|
253 | controller.add_xunit() | |
252 | if coverage: |
|
254 | if coverage: | |
253 | controller.add_coverage() |
|
255 | controller.add_coverage() | |
|
256 | controller.env['IPTEST_SUBPROC_STREAMS'] = subproc_streams | |||
254 | controller.cmd.extend(extra_args) |
|
257 | controller.cmd.extend(extra_args) | |
255 |
|
258 | |||
256 | def do_run(controller): |
|
259 | def do_run(controller): | |
257 | try: |
|
260 | try: | |
258 | try: |
|
261 | try: | |
259 | controller.launch() |
|
262 | controller.launch() | |
260 | except Exception: |
|
263 | except Exception: | |
261 | import traceback |
|
264 | import traceback | |
262 | traceback.print_exc() |
|
265 | traceback.print_exc() | |
263 | return controller, 1 # signal failure |
|
266 | return controller, 1 # signal failure | |
264 |
|
267 | |||
265 | exitcode = controller.wait() |
|
268 | exitcode = controller.wait() | |
266 | return controller, exitcode |
|
269 | return controller, exitcode | |
267 |
|
270 | |||
268 | except KeyboardInterrupt: |
|
271 | except KeyboardInterrupt: | |
269 | return controller, -signal.SIGINT |
|
272 | return controller, -signal.SIGINT | |
270 | finally: |
|
273 | finally: | |
271 | controller.cleanup() |
|
274 | controller.cleanup() | |
272 |
|
275 | |||
273 | def report(): |
|
276 | def report(): | |
274 | """Return a string with a summary report of test-related variables.""" |
|
277 | """Return a string with a summary report of test-related variables.""" | |
275 | inf = get_sys_info() |
|
278 | inf = get_sys_info() | |
276 | out = [] |
|
279 | out = [] | |
277 | def _add(name, value): |
|
280 | def _add(name, value): | |
278 | out.append((name, value)) |
|
281 | out.append((name, value)) | |
279 |
|
282 | |||
280 | _add('IPython version', inf['ipython_version']) |
|
283 | _add('IPython version', inf['ipython_version']) | |
281 | _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source'])) |
|
284 | _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source'])) | |
282 | _add('IPython package', compress_user(inf['ipython_path'])) |
|
285 | _add('IPython package', compress_user(inf['ipython_path'])) | |
283 | _add('Python version', inf['sys_version'].replace('\n','')) |
|
286 | _add('Python version', inf['sys_version'].replace('\n','')) | |
284 | _add('sys.executable', compress_user(inf['sys_executable'])) |
|
287 | _add('sys.executable', compress_user(inf['sys_executable'])) | |
285 | _add('Platform', inf['platform']) |
|
288 | _add('Platform', inf['platform']) | |
286 |
|
289 | |||
287 | width = max(len(n) for (n,v) in out) |
|
290 | width = max(len(n) for (n,v) in out) | |
288 | out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] |
|
291 | out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] | |
289 |
|
292 | |||
290 | avail = [] |
|
293 | avail = [] | |
291 | not_avail = [] |
|
294 | not_avail = [] | |
292 |
|
295 | |||
293 | for k, is_avail in have.items(): |
|
296 | for k, is_avail in have.items(): | |
294 | if is_avail: |
|
297 | if is_avail: | |
295 | avail.append(k) |
|
298 | avail.append(k) | |
296 | else: |
|
299 | else: | |
297 | not_avail.append(k) |
|
300 | not_avail.append(k) | |
298 |
|
301 | |||
299 | if avail: |
|
302 | if avail: | |
300 | out.append('\nTools and libraries available at test time:\n') |
|
303 | out.append('\nTools and libraries available at test time:\n') | |
301 | avail.sort() |
|
304 | avail.sort() | |
302 | out.append(' ' + ' '.join(avail)+'\n') |
|
305 | out.append(' ' + ' '.join(avail)+'\n') | |
303 |
|
306 | |||
304 | if not_avail: |
|
307 | if not_avail: | |
305 | out.append('\nTools and libraries NOT available at test time:\n') |
|
308 | out.append('\nTools and libraries NOT available at test time:\n') | |
306 | not_avail.sort() |
|
309 | not_avail.sort() | |
307 | out.append(' ' + ' '.join(not_avail)+'\n') |
|
310 | out.append(' ' + ' '.join(not_avail)+'\n') | |
308 |
|
311 | |||
309 | return ''.join(out) |
|
312 | return ''.join(out) | |
310 |
|
313 | |||
311 | def run_iptestall(options): |
|
314 | def run_iptestall(options): | |
312 | """Run the entire IPython test suite by calling nose and trial. |
|
315 | """Run the entire IPython test suite by calling nose and trial. | |
313 |
|
316 | |||
314 | This function constructs :class:`IPTester` instances for all IPython |
|
317 | This function constructs :class:`IPTester` instances for all IPython | |
315 | modules and package and then runs each of them. This causes the modules |
|
318 | modules and package and then runs each of them. This causes the modules | |
316 | and packages of IPython to be tested each in their own subprocess using |
|
319 | and packages of IPython to be tested each in their own subprocess using | |
317 | nose. |
|
320 | nose. | |
318 |
|
321 | |||
319 | Parameters |
|
322 | Parameters | |
320 | ---------- |
|
323 | ---------- | |
321 |
|
324 | |||
322 | All parameters are passed as attributes of the options object. |
|
325 | All parameters are passed as attributes of the options object. | |
323 |
|
326 | |||
324 | testgroups : list of str |
|
327 | testgroups : list of str | |
325 | Run only these sections of the test suite. If empty, run all the available |
|
328 | Run only these sections of the test suite. If empty, run all the available | |
326 | sections. |
|
329 | sections. | |
327 |
|
330 | |||
328 | fast : int or None |
|
331 | fast : int or None | |
329 | Run the test suite in parallel, using n simultaneous processes. If None |
|
332 | Run the test suite in parallel, using n simultaneous processes. If None | |
330 | is passed, one process is used per CPU core. Default 1 (i.e. sequential) |
|
333 | is passed, one process is used per CPU core. Default 1 (i.e. sequential) | |
331 |
|
334 | |||
332 | inc_slow : bool |
|
335 | inc_slow : bool | |
333 | Include slow tests, like IPython.parallel. By default, these tests aren't |
|
336 | Include slow tests, like IPython.parallel. By default, these tests aren't | |
334 | run. |
|
337 | run. | |
335 |
|
338 | |||
336 | xunit : bool |
|
339 | xunit : bool | |
337 | Produce Xunit XML output. This is written to multiple foo.xunit.xml files. |
|
340 | Produce Xunit XML output. This is written to multiple foo.xunit.xml files. | |
338 |
|
341 | |||
339 | coverage : bool or str |
|
342 | coverage : bool or str | |
340 | Measure code coverage from tests. True will store the raw coverage data, |
|
343 | Measure code coverage from tests. True will store the raw coverage data, | |
341 | or pass 'html' or 'xml' to get reports. |
|
344 | or pass 'html' or 'xml' to get reports. | |
342 |
|
345 | |||
343 | extra_args : list |
|
346 | extra_args : list | |
344 | Extra arguments to pass to the test subprocesses, e.g. '-v' |
|
347 | Extra arguments to pass to the test subprocesses, e.g. '-v' | |
345 | """ |
|
348 | """ | |
346 | if options.fast != 1: |
|
349 | if options.fast != 1: | |
347 | # If running in parallel, capture output so it doesn't get interleaved |
|
350 | # If running in parallel, capture output so it doesn't get interleaved | |
348 | TestController.buffer_output = True |
|
351 | TestController.buffer_output = True | |
349 |
|
352 | |||
350 | to_run, not_run = prepare_controllers(options) |
|
353 | to_run, not_run = prepare_controllers(options) | |
351 |
|
354 | |||
352 | def justify(ltext, rtext, width=70, fill='-'): |
|
355 | def justify(ltext, rtext, width=70, fill='-'): | |
353 | ltext += ' ' |
|
356 | ltext += ' ' | |
354 | rtext = (' ' + rtext).rjust(width - len(ltext), fill) |
|
357 | rtext = (' ' + rtext).rjust(width - len(ltext), fill) | |
355 | return ltext + rtext |
|
358 | return ltext + rtext | |
356 |
|
359 | |||
357 | # Run all test runners, tracking execution time |
|
360 | # Run all test runners, tracking execution time | |
358 | failed = [] |
|
361 | failed = [] | |
359 | t_start = time.time() |
|
362 | t_start = time.time() | |
360 |
|
363 | |||
361 | print() |
|
364 | print() | |
362 | if options.fast == 1: |
|
365 | if options.fast == 1: | |
363 | # This actually means sequential, i.e. with 1 job |
|
366 | # This actually means sequential, i.e. with 1 job | |
364 | for controller in to_run: |
|
367 | for controller in to_run: | |
365 | print('IPython test group:', controller.section) |
|
368 | print('IPython test group:', controller.section) | |
366 | sys.stdout.flush() # Show in correct order when output is piped |
|
369 | sys.stdout.flush() # Show in correct order when output is piped | |
367 | controller, res = do_run(controller) |
|
370 | controller, res = do_run(controller) | |
368 | if res: |
|
371 | if res: | |
369 | failed.append(controller) |
|
372 | failed.append(controller) | |
370 | if res == -signal.SIGINT: |
|
373 | if res == -signal.SIGINT: | |
371 | print("Interrupted") |
|
374 | print("Interrupted") | |
372 | break |
|
375 | break | |
373 | print() |
|
376 | print() | |
374 |
|
377 | |||
375 | else: |
|
378 | else: | |
376 | # Run tests concurrently |
|
379 | # Run tests concurrently | |
377 | try: |
|
380 | try: | |
378 | pool = multiprocessing.pool.ThreadPool(options.fast) |
|
381 | pool = multiprocessing.pool.ThreadPool(options.fast) | |
379 | for (controller, res) in pool.imap_unordered(do_run, to_run): |
|
382 | for (controller, res) in pool.imap_unordered(do_run, to_run): | |
380 | res_string = 'OK' if res == 0 else 'FAILED' |
|
383 | res_string = 'OK' if res == 0 else 'FAILED' | |
381 | print(justify('IPython test group: ' + controller.section, res_string)) |
|
384 | print(justify('IPython test group: ' + controller.section, res_string)) | |
382 | if res: |
|
385 | if res: | |
383 | print(bytes_to_str(controller.stdout)) |
|
386 | print(bytes_to_str(controller.stdout)) | |
384 | failed.append(controller) |
|
387 | failed.append(controller) | |
385 | if res == -signal.SIGINT: |
|
388 | if res == -signal.SIGINT: | |
386 | print("Interrupted") |
|
389 | print("Interrupted") | |
387 | break |
|
390 | break | |
388 | except KeyboardInterrupt: |
|
391 | except KeyboardInterrupt: | |
389 | return |
|
392 | return | |
390 |
|
393 | |||
391 | for controller in not_run: |
|
394 | for controller in not_run: | |
392 | print(justify('IPython test group: ' + controller.section, 'NOT RUN')) |
|
395 | print(justify('IPython test group: ' + controller.section, 'NOT RUN')) | |
393 |
|
396 | |||
394 | t_end = time.time() |
|
397 | t_end = time.time() | |
395 | t_tests = t_end - t_start |
|
398 | t_tests = t_end - t_start | |
396 | nrunners = len(to_run) |
|
399 | nrunners = len(to_run) | |
397 | nfail = len(failed) |
|
400 | nfail = len(failed) | |
398 | # summarize results |
|
401 | # summarize results | |
399 | print('_'*70) |
|
402 | print('_'*70) | |
400 | print('Test suite completed for system with the following information:') |
|
403 | print('Test suite completed for system with the following information:') | |
401 | print(report()) |
|
404 | print(report()) | |
402 | took = "Took %.3fs." % t_tests |
|
405 | took = "Took %.3fs." % t_tests | |
403 | print('Status: ', end='') |
|
406 | print('Status: ', end='') | |
404 | if not failed: |
|
407 | if not failed: | |
405 | print('OK (%d test groups).' % nrunners, took) |
|
408 | print('OK (%d test groups).' % nrunners, took) | |
406 | else: |
|
409 | else: | |
407 | # If anything went wrong, point out what command to rerun manually to |
|
410 | # If anything went wrong, point out what command to rerun manually to | |
408 | # see the actual errors and individual summary |
|
411 | # see the actual errors and individual summary | |
409 | failed_sections = [c.section for c in failed] |
|
412 | failed_sections = [c.section for c in failed] | |
410 | print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, |
|
413 | print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, | |
411 | nrunners, ', '.join(failed_sections)), took) |
|
414 | nrunners, ', '.join(failed_sections)), took) | |
412 | print() |
|
415 | print() | |
413 | print('You may wish to rerun these, with:') |
|
416 | print('You may wish to rerun these, with:') | |
414 | print(' iptest', *failed_sections) |
|
417 | print(' iptest', *failed_sections) | |
415 | print() |
|
418 | print() | |
416 |
|
419 | |||
417 | if options.coverage: |
|
420 | if options.coverage: | |
418 | from coverage import coverage |
|
421 | from coverage import coverage | |
419 | cov = coverage(data_file='.coverage') |
|
422 | cov = coverage(data_file='.coverage') | |
420 | cov.combine() |
|
423 | cov.combine() | |
421 | cov.save() |
|
424 | cov.save() | |
422 |
|
425 | |||
423 | # Coverage HTML report |
|
426 | # Coverage HTML report | |
424 | if options.coverage == 'html': |
|
427 | if options.coverage == 'html': | |
425 | html_dir = 'ipy_htmlcov' |
|
428 | html_dir = 'ipy_htmlcov' | |
426 | shutil.rmtree(html_dir, ignore_errors=True) |
|
429 | shutil.rmtree(html_dir, ignore_errors=True) | |
427 | print("Writing HTML coverage report to %s/ ... " % html_dir, end="") |
|
430 | print("Writing HTML coverage report to %s/ ... " % html_dir, end="") | |
428 | sys.stdout.flush() |
|
431 | sys.stdout.flush() | |
429 |
|
432 | |||
430 | # Custom HTML reporter to clean up module names. |
|
433 | # Custom HTML reporter to clean up module names. | |
431 | from coverage.html import HtmlReporter |
|
434 | from coverage.html import HtmlReporter | |
432 | class CustomHtmlReporter(HtmlReporter): |
|
435 | class CustomHtmlReporter(HtmlReporter): | |
433 | def find_code_units(self, morfs): |
|
436 | def find_code_units(self, morfs): | |
434 | super(CustomHtmlReporter, self).find_code_units(morfs) |
|
437 | super(CustomHtmlReporter, self).find_code_units(morfs) | |
435 | for cu in self.code_units: |
|
438 | for cu in self.code_units: | |
436 | nameparts = cu.name.split(os.sep) |
|
439 | nameparts = cu.name.split(os.sep) | |
437 | if 'IPython' not in nameparts: |
|
440 | if 'IPython' not in nameparts: | |
438 | continue |
|
441 | continue | |
439 | ix = nameparts.index('IPython') |
|
442 | ix = nameparts.index('IPython') | |
440 | cu.name = '.'.join(nameparts[ix:]) |
|
443 | cu.name = '.'.join(nameparts[ix:]) | |
441 |
|
444 | |||
442 | # Reimplement the html_report method with our custom reporter |
|
445 | # Reimplement the html_report method with our custom reporter | |
443 | cov._harvest_data() |
|
446 | cov._harvest_data() | |
444 | cov.config.from_args(omit='*%stests' % os.sep, html_dir=html_dir, |
|
447 | cov.config.from_args(omit='*%stests' % os.sep, html_dir=html_dir, | |
445 | html_title='IPython test coverage', |
|
448 | html_title='IPython test coverage', | |
446 | ) |
|
449 | ) | |
447 | reporter = CustomHtmlReporter(cov, cov.config) |
|
450 | reporter = CustomHtmlReporter(cov, cov.config) | |
448 | reporter.report(None) |
|
451 | reporter.report(None) | |
449 | print('done.') |
|
452 | print('done.') | |
450 |
|
453 | |||
451 | # Coverage XML report |
|
454 | # Coverage XML report | |
452 | elif options.coverage == 'xml': |
|
455 | elif options.coverage == 'xml': | |
453 | cov.xml_report(outfile='ipy_coverage.xml') |
|
456 | cov.xml_report(outfile='ipy_coverage.xml') | |
454 |
|
457 | |||
455 | if failed: |
|
458 | if failed: | |
456 | # Ensure that our exit code indicates failure |
|
459 | # Ensure that our exit code indicates failure | |
457 | sys.exit(1) |
|
460 | sys.exit(1) | |
458 |
|
461 | |||
459 | argparser = argparse.ArgumentParser(description='Run IPython test suite') |
|
462 | argparser = argparse.ArgumentParser(description='Run IPython test suite') | |
460 | argparser.add_argument('testgroups', nargs='*', |
|
463 | argparser.add_argument('testgroups', nargs='*', | |
461 | help='Run specified groups of tests. If omitted, run ' |
|
464 | help='Run specified groups of tests. If omitted, run ' | |
462 | 'all tests.') |
|
465 | 'all tests.') | |
463 | argparser.add_argument('--all', action='store_true', |
|
466 | argparser.add_argument('--all', action='store_true', | |
464 | help='Include slow tests not run by default.') |
|
467 | help='Include slow tests not run by default.') | |
465 | argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int, |
|
468 | argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int, | |
466 | help='Run test sections in parallel.') |
|
469 | help='Run test sections in parallel.') | |
467 | argparser.add_argument('--xunit', action='store_true', |
|
470 | argparser.add_argument('--xunit', action='store_true', | |
468 | help='Produce Xunit XML results') |
|
471 | help='Produce Xunit XML results') | |
469 | argparser.add_argument('--coverage', nargs='?', const=True, default=False, |
|
472 | argparser.add_argument('--coverage', nargs='?', const=True, default=False, | |
470 | help="Measure test coverage. Specify 'html' or " |
|
473 | help="Measure test coverage. Specify 'html' or " | |
471 | "'xml' to get reports.") |
|
474 | "'xml' to get reports.") | |
|
475 | argparser.add_argument('--subproc-streams', default='capture', | |||
|
476 | help="What to do with stdout/stderr from subprocesses. " | |||
|
477 | "'capture' (default), 'show' and 'discard' are the options.") | |||
472 |
|
478 | |||
473 | def default_options(): |
|
479 | def default_options(): | |
474 | """Get an argparse Namespace object with the default arguments, to pass to |
|
480 | """Get an argparse Namespace object with the default arguments, to pass to | |
475 | :func:`run_iptestall`. |
|
481 | :func:`run_iptestall`. | |
476 | """ |
|
482 | """ | |
477 | options = argparser.parse_args([]) |
|
483 | options = argparser.parse_args([]) | |
478 | options.extra_args = [] |
|
484 | options.extra_args = [] | |
479 | return options |
|
485 | return options | |
480 |
|
486 | |||
481 | def main(): |
|
487 | def main(): | |
482 | # Arguments after -- should be passed through to nose. Argparse treats |
|
488 | # Arguments after -- should be passed through to nose. Argparse treats | |
483 | # everything after -- as regular positional arguments, so we separate them |
|
489 | # everything after -- as regular positional arguments, so we separate them | |
484 | # first. |
|
490 | # first. | |
485 | try: |
|
491 | try: | |
486 | ix = sys.argv.index('--') |
|
492 | ix = sys.argv.index('--') | |
487 | except ValueError: |
|
493 | except ValueError: | |
488 | to_parse = sys.argv[1:] |
|
494 | to_parse = sys.argv[1:] | |
489 | extra_args = [] |
|
495 | extra_args = [] | |
490 | else: |
|
496 | else: | |
491 | to_parse = sys.argv[1:ix] |
|
497 | to_parse = sys.argv[1:ix] | |
492 | extra_args = sys.argv[ix+1:] |
|
498 | extra_args = sys.argv[ix+1:] | |
493 |
|
499 | |||
494 | options = argparser.parse_args(to_parse) |
|
500 | options = argparser.parse_args(to_parse) | |
495 | options.extra_args = extra_args |
|
501 | options.extra_args = extra_args | |
496 |
|
502 | |||
497 | run_iptestall(options) |
|
503 | run_iptestall(options) | |
498 |
|
504 | |||
499 |
|
505 | |||
500 | if __name__ == '__main__': |
|
506 | if __name__ == '__main__': | |
501 | main() |
|
507 | main() |
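Putting the two halves of the change together: the new `--subproc-streams` option is parsed by the controller and handed to each Python test subprocess through the `IPTEST_SUBPROC_STREAMS` environment variable, which the nose plugin then reads with a 'capture' default. A hedged end-to-end sketch of that plumbing (the argument value is an example only):

    import argparse
    import os

    # Controller side: parse the new flag (mirrors the argparser addition above).
    parser = argparse.ArgumentParser()
    parser.add_argument('--subproc-streams', default='capture',
                        help="capture (default), show or discard subprocess output")
    opts = parser.parse_args(['--subproc-streams', 'discard'])  # example value

    # configure_py_controllers() copies the choice into each subprocess env.
    env = os.environ.copy()
    env['IPTEST_SUBPROC_STREAMS'] = opts.subproc_streams

    # Inside the test subprocess, the plugin reads it back with a default:
    destination = env.get('IPTEST_SUBPROC_STREAMS', 'capture')
    print(destination)  # -> 'discard'
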