gh_api.py
@@ -1,204 +1,215 @@
|
1 | 1 | """Functions for Github authorisation.""" |
|
2 | 2 | from __future__ import print_function |
|
3 | 3 | |
|
4 | 4 | try: |
|
5 | 5 | input = raw_input |
|
6 | 6 | except NameError: |
|
7 | 7 | pass |
|
8 | 8 | |
|
9 | 9 | import os |
|
10 | 10 | |
|
11 | 11 | import requests |
|
12 | 12 | import getpass |
|
13 | 13 | import json |
|
14 | 14 | |
|
15 | 15 | # Keyring stores passwords by a 'username', but we're not storing a username and |
|
16 | 16 | # password |
|
17 | 17 | fake_username = 'ipython_tools' |
|
18 | 18 | |
|
19 | class Obj(dict): | |
|
20 | """Dictionary with attribute access to names.""" | |
|
21 | def __getattr__(self, name): | |
|
22 | try: | |
|
23 | return self[name] | |
|
24 | except KeyError: | |
|
25 | raise AttributeError(name) | |
|
26 | ||
|
27 | def __setattr__(self, name, val): | |
|
28 | self[name] = val | |
|
29 | ||
|
19 | 30 | token = None |
|
20 | 31 | def get_auth_token(): |
|
21 | 32 | global token |
|
22 | 33 | |
|
23 | 34 | if token is not None: |
|
24 | 35 | return token |
|
25 | 36 | |
|
26 | 37 | import keyring |
|
27 | 38 | token = keyring.get_password('github', fake_username) |
|
28 | 39 | if token is not None: |
|
29 | 40 | return token |
|
30 | 41 | |
|
31 | 42 | print("Please enter your github username and password. These are not " |
|
32 | 43 | "stored, only used to get an oAuth token. You can revoke this at " |
|
33 | 44 | "any time on Github.") |
|
34 | 45 | user = input("Username: ") |
|
35 | 46 | pw = getpass.getpass("Password: ") |
|
36 | 47 | |
|
37 | 48 | auth_request = { |
|
38 | 49 | "scopes": [ |
|
39 | 50 | "public_repo", |
|
40 | 51 | "gist" |
|
41 | 52 | ], |
|
42 | 53 | "note": "IPython tools", |
|
43 | 54 | "note_url": "https://github.com/ipython/ipython/tree/master/tools", |
|
44 | 55 | } |
|
45 | 56 | response = requests.post('https://api.github.com/authorizations', |
|
46 | 57 | auth=(user, pw), data=json.dumps(auth_request)) |
|
47 | 58 | response.raise_for_status() |
|
48 | 59 | token = json.loads(response.text)['token'] |
|
49 | 60 | keyring.set_password('github', fake_username, token) |
|
50 | 61 | return token |
|
51 | 62 | |
|
52 | 63 | def make_auth_header(): |
|
53 | 64 | return {'Authorization': 'token ' + get_auth_token()} |
|
54 | 65 | |
|
55 | 66 | def post_issue_comment(project, num, body): |
|
56 | 67 | url = 'https://api.github.com/repos/{project}/issues/{num}/comments'.format(project=project, num=num) |
|
57 | 68 | payload = json.dumps({'body': body}) |
|
58 | 69 | r = requests.post(url, data=payload, headers=make_auth_header()) |
|
59 | 70 | |
|
60 | 71 | def post_gist(content, description='', filename='file', auth=False): |
|
61 | 72 | """Post some text to a Gist, and return the URL.""" |
|
62 | 73 | post_data = json.dumps({ |
|
63 | 74 | "description": description, |
|
64 | 75 | "public": True, |
|
65 | 76 | "files": { |
|
66 | 77 | filename: { |
|
67 | 78 | "content": content |
|
68 | 79 | } |
|
69 | 80 | } |
|
70 | 81 | }).encode('utf-8') |
|
71 | 82 | |
|
72 | 83 | headers = make_auth_header() if auth else {} |
|
73 | 84 | response = requests.post("https://api.github.com/gists", data=post_data, headers=headers) |
|
74 | 85 | response.raise_for_status() |
|
75 | 86 | response_data = json.loads(response.text) |
|
76 | 87 | return response_data['html_url'] |
|
77 | 88 | |
|
78 | 89 | def get_pull_request(project, num, github_api=3): |
|
79 | 90 | """get pull request info by number |
|
80 | 91 | |
|
81 | 92 | github_api : version of github api to use |
|
82 | 93 | """ |
|
83 | 94 | if github_api==2 : |
|
84 | 95 | url = "http://github.com/api/v2/json/pulls/{project}/{num}".format(project=project, num=num) |
|
85 | 96 | elif github_api == 3: |
|
86 | 97 | url = "https://api.github.com/repos/{project}/pulls/{num}".format(project=project, num=num) |
|
87 | 98 | response = requests.get(url) |
|
88 | 99 | response.raise_for_status() |
|
89 | 100 | if github_api == 2 : |
|
90 | 101 | return json.loads(response.text)['pull'] |
|
91 | return json.loads(response.text) | |
|
102 | return json.loads(response.text, object_hook=Obj) | |
|
92 | 103 | |
|
93 | 104 | def get_pulls_list(project, github_api=3): |
|
94 | 105 | """get pull request list |
|
95 | 106 | |
|
96 | 107 | github_api : version of github api to use |
|
97 | 108 | """ |
|
98 | 109 | if github_api == 3 : |
|
99 | 110 | url = "https://api.github.com/repos/{project}/pulls".format(project=project) |
|
100 | 111 | else : |
|
101 | 112 | url = "http://github.com/api/v2/json/pulls/{project}".format(project=project) |
|
102 | 113 | response = requests.get(url) |
|
103 | 114 | response.raise_for_status() |
|
104 | 115 | if github_api == 2 : |
|
105 | 116 | return json.loads(response.text)['pulls'] |
|
106 | 117 | return json.loads(response.text) |
|
107 | 118 | |
|
108 | 119 | # encode_multipart_formdata is from urllib3.filepost |
|
109 | 120 | # The only change is to iter_fields, to enforce S3's required key ordering |
|
110 | 121 | |
|
111 | 122 | def iter_fields(fields): |
|
112 | 123 | fields = fields.copy() |
|
113 | 124 | for key in ('key', 'acl', 'Filename', 'success_action_status', 'AWSAccessKeyId', |
|
114 | 125 | 'Policy', 'Signature', 'Content-Type', 'file'): |
|
115 | 126 | yield (key, fields.pop(key)) |
|
116 | 127 | for (k,v) in fields.items(): |
|
117 | 128 | yield k,v |
|
118 | 129 | |
|
119 | 130 | def encode_multipart_formdata(fields, boundary=None): |
|
120 | 131 | """ |
|
121 | 132 | Encode a dictionary of ``fields`` using the multipart/form-data mime format. |
|
122 | 133 | |
|
123 | 134 | :param fields: |
|
124 | 135 | Dictionary of fields or list of (key, value) field tuples. The key is |
|
125 | 136 | treated as the field name, and the value as the body of the form-data |
|
126 | 137 | bytes. If the value is a tuple of two elements, then the first element |
|
127 | 138 | is treated as the filename of the form-data section. |
|
128 | 139 | |
|
129 | 140 | Field names and filenames must be unicode. |
|
130 | 141 | |
|
131 | 142 | :param boundary: |
|
132 | 143 | If not specified, then a random boundary will be generated using |
|
133 | 144 | :func:`mimetools.choose_boundary`. |
|
134 | 145 | """ |
|
135 | 146 | # copy requests imports in here: |
|
136 | 147 | from io import BytesIO |
|
137 | 148 | from requests.packages.urllib3.filepost import ( |
|
138 | 149 | choose_boundary, six, writer, b, get_content_type |
|
139 | 150 | ) |
|
140 | 151 | body = BytesIO() |
|
141 | 152 | if boundary is None: |
|
142 | 153 | boundary = choose_boundary() |
|
143 | 154 | |
|
144 | 155 | for fieldname, value in iter_fields(fields): |
|
145 | 156 | body.write(b('--%s\r\n' % (boundary))) |
|
146 | 157 | |
|
147 | 158 | if isinstance(value, tuple): |
|
148 | 159 | filename, data = value |
|
149 | 160 | writer(body).write('Content-Disposition: form-data; name="%s"; ' |
|
150 | 161 | 'filename="%s"\r\n' % (fieldname, filename)) |
|
151 | 162 | body.write(b('Content-Type: %s\r\n\r\n' % |
|
152 | 163 | (get_content_type(filename)))) |
|
153 | 164 | else: |
|
154 | 165 | data = value |
|
155 | 166 | writer(body).write('Content-Disposition: form-data; name="%s"\r\n' |
|
156 | 167 | % (fieldname)) |
|
157 | 168 | body.write(b'Content-Type: text/plain\r\n\r\n') |
|
158 | 169 | |
|
159 | 170 | if isinstance(data, int): |
|
160 | 171 | data = str(data) # Backwards compatibility |
|
161 | 172 | if isinstance(data, six.text_type): |
|
162 | 173 | writer(body).write(data) |
|
163 | 174 | else: |
|
164 | 175 | body.write(data) |
|
165 | 176 | |
|
166 | 177 | body.write(b'\r\n') |
|
167 | 178 | |
|
168 | 179 | body.write(b('--%s--\r\n' % (boundary))) |
|
169 | 180 | |
|
170 | 181 | content_type = b('multipart/form-data; boundary=%s' % boundary) |
|
171 | 182 | |
|
172 | 183 | return body.getvalue(), content_type |
|
173 | 184 | |
|
174 | 185 | |
|
175 | 186 | def post_download(project, filename, name=None, description=""): |
|
176 | 187 | """Upload a file to the GitHub downloads area""" |
|
177 | 188 | if name is None: |
|
178 | 189 | name = os.path.basename(filename) |
|
179 | 190 | with open(filename, 'rb') as f: |
|
180 | 191 | filedata = f.read() |
|
181 | 192 | |
|
182 | 193 | url = "https://api.github.com/repos/{project}/downloads".format(project=project) |
|
183 | 194 | |
|
184 | 195 | payload = json.dumps(dict(name=name, size=len(filedata), |
|
185 | 196 | description=description)) |
|
186 | 197 | response = requests.post(url, data=payload, headers=make_auth_header()) |
|
187 | 198 | response.raise_for_status() |
|
188 | 199 | reply = json.loads(response.content) |
|
189 | 200 | s3_url = reply['s3_url'] |
|
190 | 201 | |
|
191 | 202 | fields = dict( |
|
192 | 203 | key=reply['path'], |
|
193 | 204 | acl=reply['acl'], |
|
194 | 205 | success_action_status=201, |
|
195 | 206 | Filename=reply['name'], |
|
196 | 207 | AWSAccessKeyId=reply['accesskeyid'], |
|
197 | 208 | Policy=reply['policy'], |
|
198 | 209 | Signature=reply['signature'], |
|
199 | 210 | file=(reply['name'], filedata), |
|
200 | 211 | ) |
|
201 | 212 | fields['Content-Type'] = reply['mime_type'] |
|
202 | 213 | data, content_type = encode_multipart_formdata(fields) |
|
203 | 214 | s3r = requests.post(s3_url, data=data, headers={'Content-Type': content_type}) |
|
204 | 215 | return s3r |
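The main functional change to gh_api.py above is the new Obj helper together with object_hook=Obj in get_pull_request, so pull-request data can be read with attribute access as well as item access. A minimal standalone sketch of that behaviour (the JSON payload below is an invented example, not real API output):

import json

class Obj(dict):
    """Dictionary with attribute access to names (as added in this changeset)."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, val):
        self[name] = val

# object_hook is applied to every decoded JSON object, so nested dicts become Obj too.
payload = '{"mergeable": true, "head": {"ref": "some-branch", "sha": "0123abc"}}'
pr = json.loads(payload, object_hook=Obj)
assert pr.mergeable is True              # attribute access
assert pr['head'].ref == "some-branch"   # nested objects support it as well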
post_pr_test.py
@@ -1,13 +1,13 @@
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Post the results of a pull request test to Github. |
|
3 | 3 | """ |
|
4 | from test_pr import load_results, post_logs, post_results_comment, print_results | |
|
4 | from test_pr import TestRun | |
|
5 | 5 | |
|
6 | num, results, pr, unavailable_pythons = load_results() | |
|
7 | results_urls = post_logs(results) | |
|
8 | print_results(pr, results_urls, unavailable_pythons) | |
|
9 | post_results_comment(pr, results_urls, num, unavailable_pythons) | |
|
6 | testrun = TestRun.load_results() | |
|
7 | testrun.post_logs() | |
|
8 | testrun.print_results() | |
|
9 | testrun.post_results_comment() | |
|
10 | 10 | |
|
11 | 11 | print() |
|
12 | 12 | print("Posted test results to pull request") |
|
13 | print(" " + pr['html_url']) | |
|
13 | print(" " + testrun.pr['html_url']) |
test_pr.py
@@ -1,259 +1,266 @@
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """ |
|
3 | 3 | This is a script for testing pull requests for IPython. It merges the pull |
|
4 | 4 | request with current master, installs and tests on all available versions of |
|
5 | 5 | Python, and posts the results to Gist if any tests fail. |
|
6 | 6 | |
|
7 | 7 | Usage: |
|
8 | 8 | python test_pr.py 1657 |
|
9 | 9 | """ |
|
10 | 10 | from __future__ import print_function |
|
11 | 11 | |
|
12 | 12 | import errno |
|
13 | 13 | from glob import glob |
|
14 | 14 | import io |
|
15 | 15 | import json |
|
16 | 16 | import os |
|
17 | 17 | import pickle |
|
18 | 18 | import re |
|
19 | 19 | import requests |
|
20 | 20 | import shutil |
|
21 | 21 | import time |
|
22 | 22 | from subprocess import call, check_call, check_output, PIPE, STDOUT, CalledProcessError |
|
23 | 23 | import sys |
|
24 | 24 | |
|
25 | 25 | import gh_api |
|
26 | from gh_api import Obj | |
|
26 | 27 | |
|
27 | 28 | basedir = os.path.join(os.path.expanduser("~"), ".ipy_pr_tests") |
|
28 | 29 | repodir = os.path.join(basedir, "ipython") |
|
29 | 30 | ipy_repository = 'git://github.com/ipython/ipython.git' |
|
30 | 31 | ipy_http_repository = 'http://github.com/ipython/ipython.git' |
|
31 | 32 | gh_project="ipython/ipython" |
|
32 | 33 | |
|
33 | 34 | supported_pythons = ['python2.6', 'python2.7', 'python3.1', 'python3.2'] |
|
34 | unavailable_pythons = [] | |
|
35 | 35 | |
|
36 | def available_python_versions(): | |
|
36 | missing_libs_re = re.compile(r"Tools and libraries NOT available at test time:\n" | |
|
37 | r"\s*(.*?)\n") | |
|
38 | def get_missing_libraries(log): | |
|
39 | m = missing_libs_re.search(log) | |
|
40 | if m: | |
|
41 | return m.group(1) | |
|
42 | ||
|
43 | class TestRun(object): | |
|
44 | def __init__(self, pr_num): | |
|
45 | self.unavailable_pythons = [] | |
|
46 | self.venvs = [] | |
|
47 | self.pr_num = pr_num | |
|
48 | ||
|
49 | self.pr = gh_api.get_pull_request(gh_project, pr_num) | |
|
50 | ||
|
51 | self.setup() | |
|
52 | ||
|
53 | self.results = [] | |
|
54 | ||
|
55 | def available_python_versions(self): | |
|
37 | 56 | """Get the executable names of available versions of Python on the system. |
|
38 | 57 | """ |
|
39 | del unavailable_pythons[:] | |
|
40 | 58 | for py in supported_pythons: |
|
41 | 59 | try: |
|
42 | 60 | check_call([py, '-c', 'import nose'], stdout=PIPE) |
|
43 | 61 | yield py |
|
44 | 62 | except (OSError, CalledProcessError): |
|
45 | unavailable_pythons.append(py) | |
|
46 | ||
|
47 | venvs = [] | |
|
63 | self.unavailable_pythons.append(py) | |
|
48 | 64 | |
|
49 | def setup(): | |
|
65 | def setup(self): | |
|
50 | 66 | """Prepare the repository and virtualenvs.""" |
|
51 | global venvs | |
|
52 | ||
|
53 | 67 | try: |
|
54 | 68 | os.mkdir(basedir) |
|
55 | 69 | except OSError as e: |
|
56 | 70 | if e.errno != errno.EEXIST: |
|
57 | 71 | raise |
|
58 | 72 | os.chdir(basedir) |
|
59 | 73 | |
|
60 | 74 | # Delete virtualenvs and recreate |
|
61 | 75 | for venv in glob('venv-*'): |
|
62 | 76 | shutil.rmtree(venv) |
|
63 | for py in available_python_versions(): | |
|
77 | for py in self.available_python_versions(): | |
|
64 | 78 | check_call(['virtualenv', '-p', py, '--system-site-packages', 'venv-%s' % py]) |
|
65 | venvs.append((py, 'venv-%s' % py)) | |
|
79 | self.venvs.append((py, 'venv-%s' % py)) | |
|
66 | 80 | |
|
67 | 81 | # Check out and update the repository |
|
68 | 82 | if not os.path.exists('ipython'): |
|
69 | 83 | try : |
|
70 | 84 | check_call(['git', 'clone', ipy_repository]) |
|
71 | 85 | except CalledProcessError : |
|
72 | 86 | check_call(['git', 'clone', ipy_http_repository]) |
|
73 | 87 | os.chdir(repodir) |
|
74 | 88 | check_call(['git', 'checkout', 'master']) |
|
75 | 89 | try : |
|
76 | 90 | check_call(['git', 'pull', ipy_repository, 'master']) |
|
77 | 91 | except CalledProcessError : |
|
78 | 92 | check_call(['git', 'pull', ipy_http_repository, 'master']) |
|
79 | 93 | os.chdir(basedir) |
|
80 | 94 | |
|
81 | missing_libs_re = re.compile(r"Tools and libraries NOT available at test time:\n" | |
|
82 | r"\s*(.*?)\n") | |
|
83 | def get_missing_libraries(log): | |
|
84 | m = missing_libs_re.search(log) | |
|
85 | if m: | |
|
86 | return m.group(1) | |
|
95 | def get_branch(self): | |
|
96 | repo = self.pr['head']['repo']['clone_url'] | |
|
97 | branch = self.pr['head']['ref'] | |
|
98 | owner = self.pr['head']['repo']['owner']['login'] | |
|
99 | mergeable = self.pr['mergeable'] | |
|
87 | 100 | |
|
88 | def get_branch(repo, branch, owner, mergeable): | |
|
89 | 101 | os.chdir(repodir) |
|
90 | 102 | if mergeable: |
|
91 | 103 | merged_branch = "%s-%s" % (owner, branch) |
|
92 | 104 | # Delete the branch first |
|
93 | 105 | call(['git', 'branch', '-D', merged_branch]) |
|
94 | 106 | check_call(['git', 'checkout', '-b', merged_branch]) |
|
95 | 107 | check_call(['git', 'pull', '--no-ff', '--no-commit', repo, branch]) |
|
96 | 108 | check_call(['git', 'commit', '-m', "merge %s/%s" % (repo, branch)]) |
|
97 | 109 | else: |
|
98 | 110 | # Fetch the branch without merging it. |
|
99 | 111 | check_call(['git', 'fetch', repo, branch]) |
|
100 | 112 | check_call(['git', 'checkout', 'FETCH_HEAD']) |
|
101 | 113 | os.chdir(basedir) |
|
102 | 114 | |
|
103 | def run_tests(venv): | |
|
104 | py = os.path.join(basedir, venv, 'bin', 'python') | |
|
105 | print(py) | |
|
106 | os.chdir(repodir) | |
|
107 | # cleanup build-dir | |
|
108 | if os.path.exists('build'): | |
|
109 | shutil.rmtree('build') | |
|
110 | check_call([py, 'setup.py', 'install']) | |
|
111 | os.chdir(basedir) | |
|
112 | ||
|
113 | # Environment variables: | |
|
114 | orig_path = os.environ["PATH"] | |
|
115 | os.environ["PATH"] = os.path.join(basedir, venv, 'bin') + ':' + os.environ["PATH"] | |
|
116 | os.environ.pop("PYTHONPATH", None) | |
|
117 | ||
|
118 | iptest = os.path.join(basedir, venv, 'bin', 'iptest') | |
|
119 | if not os.path.exists(iptest): | |
|
120 | iptest = os.path.join(basedir, venv, 'bin', 'iptest3') | |
|
121 | ||
|
122 | print("\nRunning tests, this typically takes a few minutes...") | |
|
123 | try: | |
|
124 | return True, check_output([iptest], stderr=STDOUT).decode('utf-8') | |
|
125 | except CalledProcessError as e: | |
|
126 | return False, e.output.decode('utf-8') | |
|
127 | finally: | |
|
128 | # Restore $PATH | |
|
129 | os.environ["PATH"] = orig_path | |
|
130 | ||
|
131 | def markdown_format(pr, results_urls, unavailable_pythons): | |
|
132 | def format_result(py, passed, gist_url, missing_libraries): | |
|
133 | s = "* %s: " % py | |
|
134 | if passed: | |
|
115 | def markdown_format(self): | |
|
116 | def format_result(result): | |
|
117 | s = "* %s: " % result.py | |
|
118 | if result.passed: | |
|
135 | 119 | s += "OK" |
|
136 | 120 | else: |
|
137 | s += "Failed, log at %s" % gist_url |
|
138 | if missing_libraries: | |
|
139 | s += " (libraries not available: " + missing_libraries + ")" | |
|
121 | s += "Failed, log at %s" % result.log_url | |
|
122 | if result.missing_libraries: | |
|
123 | s += " (libraries not available: " + result.missing_libraries + ")" | |
|
140 | 124 | return s |
|
141 | 125 | |
|
142 | if pr['mergeable']: | |
|
143 | com = pr['head']['sha'][:7] + " merged into master" | |
|
126 | if self.pr['mergeable']: | |
|
127 | com = self.pr['head']['sha'][:7] + " merged into master" | |
|
144 | 128 | else: |
|
145 | com = pr['head']['sha'][:7] + " (can't merge cleanly)" | |
|
129 | com = self.pr['head']['sha'][:7] + " (can't merge cleanly)" | |
|
146 | 130 | lines = ["**Test results for commit %s**" % com, |
|
147 | 131 | "Platform: " + sys.platform, |
|
148 | 132 | ""] + \ |
|
149 | [format_result(py, passed, gist_url, missing_libraries) for py, passed, gist_url, missing_libraries in results_urls] + \ |
|
133 | [format_result(r) for r in self.results] + \ | |
|
150 | 134 | ["", |
|
151 | "Not available for testing: " + ", ".join(unavailable_pythons)] |
|
135 | "Not available for testing: " + ", ".join(self.unavailable_pythons)] | |
|
152 | 136 | return "\n".join(lines) |
|
153 | 137 | |
|
154 | def post_results_comment(pr, results, num, unavailable_pythons=unavailable_pythons): | |
|
155 | body = markdown_format(pr, results, unavailable_pythons) | |
|
156 | gh_api.post_issue_comment(gh_project, num, body) | |
|
138 | def post_results_comment(self): | |
|
139 | body = self.markdown_format() | |
|
140 | gh_api.post_issue_comment(gh_project, self.pr_num, body) | |
|
141 | ||
|
142 | def print_results(self): | |
|
143 | pr = self.pr | |
|
157 | 144 | |
|
158 | def print_results(pr, results_urls, unavailable_pythons=unavailable_pythons): | |
|
159 | 145 | print("\n") |
|
160 | 146 | if pr['mergeable']: |
|
161 | 147 | print("**Test results for commit %s merged into master**" % pr['head']['sha'][:7]) |
|
162 | 148 | else: |
|
163 | 149 | print("**Test results for commit %s (can't merge cleanly)**" % pr['head']['sha'][:7]) |
|
164 | 150 | print("Platform:", sys.platform) |
|
165 | for py, passed, gist_url, missing_libraries in results_urls: | |
|
166 | if passed: | |
|
167 | print(py, ":", "OK") | |
|
151 | for result in self.results: | |
|
152 | if result.passed: | |
|
153 | print(result.py, ":", "OK") | |
|
168 | 154 | else: |
|
169 | print(py, ":", "Failed") | |
|
170 | print(" Test log:", gist_url) |
|
171 | if missing_libraries: | |
|
172 | print(" Libraries not available:", missing_libraries) | |
|
173 | print("Not available for testing:", ", ".join(unavailable_pythons)) | |
|
155 | print(result.py, ":", "Failed") | |
|
156 | print(" Test log:", result.get('log_url') or result.log_file) | |
|
157 | if result.missing_libraries: | |
|
158 | print(" Libraries not available:", result.missing_libraries) | |
|
159 | print("Not available for testing:", ", ".join(self.unavailable_pythons)) | |
|
174 | 160 | |
|
175 | def dump_results(num, results, pr, unavailable_pythons): |
|
161 | def dump_results(self): | |
|
176 | 162 | with open(os.path.join(basedir, 'lastresults.pkl'), 'wb') as f: |
|
177 | pickle.dump((num, results, pr, unavailable_pythons), f) | |
|
163 | pickle.dump(self, f) | |
|
178 | 164 | |
|
165 | @staticmethod | |
|
179 | 166 | def load_results(): |
|
180 | 167 | with open(os.path.join(basedir, 'lastresults.pkl'), 'rb') as f: |
|
181 | 168 | return pickle.load(f) |
|
182 | 169 | |
|
183 | def save_logs(results, pr): |
|
184 | results_paths = [] | |
|
185 | for py, passed, log, missing_libraries in results: | |
|
186 | if passed: | |
|
187 | results_paths.append((py, passed, None, missing_libraries)) | |
|
188 | else: | |
|
189 | ||
|
190 | result_locn = os.path.abspath(os.path.join('venv-%s' % py, | |
|
191 | pr['head']['sha'][:7]+".log")) | |
|
170 | def save_logs(self): | |
|
171 | for result in self.results: | |
|
172 | if not result.passed: | |
|
173 | result_locn = os.path.abspath(os.path.join('venv-%s' % result.py, | |
|
174 | self.pr['head']['sha'][:7]+".log")) | |
|
192 | 175 | with io.open(result_locn, 'w', encoding='utf-8') as f: |
|
193 | f.write(log) | |
|
194 | ||
|
195 | results_paths.append((py, False, result_locn, missing_libraries)) | |
|
176 | f.write(result.log) | |
|
196 | 177 | |
|
197 | return results_paths | |
|
178 | result.log_file = result_locn | |
|
198 | 179 | |
|
199 | def post_logs(results): |
|
200 | results_urls = [] | |
|
201 | for py, passed, log, missing_libraries in results: | |
|
202 | if passed: | |
|
203 | results_urls.append((py, passed, None, missing_libraries)) | |
|
204 | else: | |
|
205 | result_locn = gh_api.post_gist(log, description='IPython test log', | |
|
180 | def post_logs(self): | |
|
181 | for result in self.results: | |
|
182 | if not result.passed: | |
|
183 | result.log_url = gh_api.post_gist(result.log, | |
|
184 | description='IPython test log', | |
|
206 | 185 | filename="results.log", auth=True) |
|
207 | results_urls.append((py, False, result_locn, missing_libraries)) | |
|
208 | 186 | |
|
209 | return results_urls | |
|
187 | def run(self): | |
|
188 | for py, venv in self.venvs: | |
|
189 | tic = time.time() | |
|
190 | passed, log = run_tests(venv) | |
|
191 | elapsed = int(time.time() - tic) | |
|
192 | print("Ran tests with %s in %is" % (py, elapsed)) | |
|
193 | missing_libraries = get_missing_libraries(log) | |
|
194 | ||
|
195 | self.results.append(Obj(py=py, | |
|
196 | passed=passed, | |
|
197 | log=log, | |
|
198 | missing_libraries=missing_libraries | |
|
199 | ) | |
|
200 | ) | |
|
201 | ||
|
202 | ||
|
203 | def run_tests(venv): | |
|
204 | py = os.path.join(basedir, venv, 'bin', 'python') | |
|
205 | print(py) | |
|
206 | os.chdir(repodir) | |
|
207 | # cleanup build-dir | |
|
208 | if os.path.exists('build'): | |
|
209 | shutil.rmtree('build') | |
|
210 | check_call([py, 'setup.py', 'install']) | |
|
211 | os.chdir(basedir) | |
|
212 | ||
|
213 | # Environment variables: | |
|
214 | orig_path = os.environ["PATH"] | |
|
215 | os.environ["PATH"] = os.path.join(basedir, venv, 'bin') + ':' + os.environ["PATH"] | |
|
216 | os.environ.pop("PYTHONPATH", None) | |
|
217 | ||
|
218 | iptest = os.path.join(basedir, venv, 'bin', 'iptest') | |
|
219 | if not os.path.exists(iptest): | |
|
220 | iptest = os.path.join(basedir, venv, 'bin', 'iptest3') | |
|
221 | ||
|
222 | print("\nRunning tests, this typically takes a few minutes...") | |
|
223 | try: | |
|
224 | return True, check_output([iptest], stderr=STDOUT).decode('utf-8') | |
|
225 | except CalledProcessError as e: | |
|
226 | return False, e.output.decode('utf-8') | |
|
227 | finally: | |
|
228 | # Restore $PATH | |
|
229 | os.environ["PATH"] = orig_path | |
|
230 | ||
|
210 | 231 | |
|
211 | 232 | def test_pr(num, post_results=True): |
|
212 | 233 | # Get Github authorisation first, so that the user is prompted straight away |
|
213 | 234 | # if their login is needed. |
|
214 | 235 | if post_results: |
|
215 | 236 | gh_api.get_auth_token() |
|
216 | 237 | |
|
217 | setup() | |
|
218 | pr = gh_api.get_pull_request(gh_project, num) | |
|
219 | get_branch(repo=pr['head']['repo']['clone_url'], | |
|
220 | branch=pr['head']['ref'], | |
|
221 | owner=pr['head']['repo']['owner']['login'], | |
|
222 | mergeable=pr['mergeable'], | |
|
223 | ) | |
|
238 | testrun = TestRun(num) | |
|
224 | 239 | |
|
225 | results = [] | |
|
226 | for py, venv in venvs: | |
|
227 | tic = time.time() | |
|
228 | passed, log = run_tests(venv) | |
|
229 | elapsed = int(time.time() - tic) | |
|
230 | print("Ran tests with %s in %is" % (py, elapsed)) | |
|
231 | missing_libraries = get_missing_libraries(log) | |
|
232 | if passed: | |
|
233 | results.append((py, True, None, missing_libraries)) | |
|
234 | else: | |
|
235 | results.append((py, False, log, missing_libraries)) | |
|
240 | testrun.get_branch() | |
|
241 | ||
|
242 | testrun.run() | |
|
236 | 243 | |
|
237 | dump_results(num, results, pr, unavailable_pythons) |
|
244 | testrun.dump_results() | |
|
238 | 245 | |
|
239 | results_paths = save_logs(results, pr) | |
|
240 | print_results(pr, results_paths) |
|
246 | testrun.save_logs() | |
|
247 | testrun.print_results() | |
|
241 | 248 | |
|
242 | 249 | if post_results: |
|
243 | results_urls = post_logs(results) |
|
244 | post_results_comment(pr, results_urls, num) |
|
250 | results_urls = testrun.post_logs | |
|
251 | testrun.post_results_comment() | |
|
245 | 252 | print("(Posted to Github)") |
|
246 | 253 | else: |
|
247 | 254 | post_script = os.path.join(os.path.dirname(sys.argv[0]), "post_pr_test.py") |
|
248 | 255 | print("To post the results to Github, run", post_script) |
|
249 | 256 | |
|
250 | 257 | |
|
251 | 258 | if __name__ == '__main__': |
|
252 | 259 | import argparse |
|
253 | 260 | parser = argparse.ArgumentParser(description="Test an IPython pull request") |
|
254 | 261 | parser.add_argument('-p', '--publish', action='store_true', |
|
255 | 262 | help="Publish the results to Github") |
|
256 | 263 | parser.add_argument('number', type=int, help="The pull request number") |
|
257 | 264 | |
|
258 | 265 | args = parser.parse_args() |
|
259 | 266 | test_pr(args.number, post_results=args.publish) |
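With this refactor the body of test_pr() is just a sequence of TestRun method calls, and post_pr_test.py replays a pickled run instead of passing result tuples around. A rough sketch of the intended flow, assuming the refactored test_pr module is importable and that git, virtualenv and the supported Pythons are available (1657 is the example PR number from the module docstring):

from test_pr import TestRun

testrun = TestRun(1657)        # fetches PR metadata and builds the venvs via __init__/setup()
testrun.get_branch()           # merge (or just fetch) the PR branch into the local clone
testrun.run()                  # fills testrun.results with Obj(py=..., passed=..., log=..., missing_libraries=...)
testrun.dump_results()         # pickles the whole TestRun to ~/.ipy_pr_tests/lastresults.pkl
testrun.save_logs()            # writes log files for failed runs, setting result.log_file
testrun.print_results()

# What post_pr_test.py now does with the pickled run:
testrun = TestRun.load_results()
testrun.post_logs()            # uploads failure logs as Gists, setting result.log_url
testrun.post_results_comment()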