@@ -1,301 +1,301 @@
"""Functions for Github API requests."""
from __future__ import print_function

try:
    input = raw_input
except NameError:
    pass

import os
import re
import sys

import requests
import getpass
import json

try:
    import requests_cache
except ImportError:
-    print("no cache", file=sys.stderr)
+    print("cache not available, install `requests_cache` for caching.", file=sys.stderr)
else:
    requests_cache.install_cache("gh_api", expire_after=3600)

# Keyring stores passwords by a 'username', but we're not storing a username and
# password
fake_username = 'ipython_tools'

class Obj(dict):
    """Dictionary with attribute access to names."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, val):
        self[name] = val

token = None
def get_auth_token():
    global token

    if token is not None:
        return token

    import keyring
    token = keyring.get_password('github', fake_username)
    if token is not None:
        return token

    print("Please enter your github username and password. These are not "
          "stored, only used to get an oAuth token. You can revoke this at "
          "any time on Github.")
    user = input("Username: ")
    pw = getpass.getpass("Password: ")

    auth_request = {
        "scopes": [
            "public_repo",
            "gist"
        ],
        "note": "IPython tools",
        "note_url": "https://github.com/ipython/ipython/tree/master/tools",
    }
    response = requests.post('https://api.github.com/authorizations',
                             auth=(user, pw), data=json.dumps(auth_request))
    if response.status_code == 401 and response.headers.get('X-GitHub-OTP') == 'required; sms':
        print("Your login API requested an SMS one-time password")
        sms_pw = getpass.getpass("SMS password: ")
        response = requests.post('https://api.github.com/authorizations',
                                 auth=(user, pw),
                                 data=json.dumps(auth_request),
                                 headers={'X-GitHub-OTP':sms_pw})
    response.raise_for_status()
    token = json.loads(response.text)['token']
    keyring.set_password('github', fake_username, token)
    return token
|
def make_auth_header():
    return {'Authorization': 'token ' + get_auth_token()}

def post_issue_comment(project, num, body):
    url = 'https://api.github.com/repos/{project}/issues/{num}/comments'.format(project=project, num=num)
    payload = json.dumps({'body': body})
    requests.post(url, data=payload, headers=make_auth_header())

def post_gist(content, description='', filename='file', auth=False):
    """Post some text to a Gist, and return the URL."""
    post_data = json.dumps({
        "description": description,
        "public": True,
        "files": {
            filename: {
                "content": content
            }
        }
    }).encode('utf-8')

    headers = make_auth_header() if auth else {}
    response = requests.post("https://api.github.com/gists", data=post_data, headers=headers)
    response.raise_for_status()
    response_data = json.loads(response.text)
    return response_data['html_url']

def get_pull_request(project, num, auth=False):
    """get pull request info by number
    """
    url = "https://api.github.com/repos/{project}/pulls/{num}".format(project=project, num=num)
    if auth:
        header = make_auth_header()
    else:
        header = None
    print("fetching %s" % url, file=sys.stderr)
    response = requests.get(url, headers=header)
    response.raise_for_status()
    return json.loads(response.text, object_hook=Obj)

def get_pull_request_files(project, num, auth=False):
    """get list of files in a pull request"""
    url = "https://api.github.com/repos/{project}/pulls/{num}/files".format(project=project, num=num)
    if auth:
        header = make_auth_header()
    else:
        header = None
    return get_paged_request(url, headers=header)

element_pat = re.compile(r'<(.+?)>')
rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')

def get_paged_request(url, headers=None, **params):
    """get a full list, handling APIv3's paging"""
    results = []
    params.setdefault("per_page", 100)
    while True:
        if '?' in url:
            params = None
            print("fetching %s" % url, file=sys.stderr)
        else:
            print("fetching %s with %s" % (url, params), file=sys.stderr)
        response = requests.get(url, headers=headers, params=params)
        response.raise_for_status()
        results.extend(response.json())
        if 'next' in response.links:
            url = response.links['next']['url']
        else:
            break
    return results

def get_pulls_list(project, auth=False, **params):
    """get pull request list"""
    params.setdefault("state", "closed")
    url = "https://api.github.com/repos/{project}/pulls".format(project=project)
    if auth:
        headers = make_auth_header()
    else:
        headers = None
    pages = get_paged_request(url, headers=headers, **params)
    return pages

def get_issues_list(project, auth=False, **params):
    """get issues list"""
    params.setdefault("state", "closed")
    url = "https://api.github.com/repos/{project}/issues".format(project=project)
    if auth:
        headers = make_auth_header()
    else:
        headers = None
    pages = get_paged_request(url, headers=headers, **params)
    return pages

def get_milestones(project, auth=False, **params):
    params.setdefault('state', 'all')
    url = "https://api.github.com/repos/{project}/milestones".format(project=project)
    if auth:
        headers = make_auth_header()
    else:
        headers = None
    milestones = get_paged_request(url, headers=headers, **params)
    return milestones

def get_milestone_id(project, milestone, auth=False, **params):
    milestones = get_milestones(project, auth=auth, **params)
    for mstone in milestones:
        if mstone['title'] == milestone:
            return mstone['number']
    else:
        raise ValueError("milestone %s not found" % milestone)

def is_pull_request(issue):
    """Return True if the given issue is a pull request."""
    return bool(issue.get('pull_request', {}).get('html_url', None))

def get_authors(pr):
    print("getting authors for #%i" % pr['number'], file=sys.stderr)
    h = make_auth_header()
    r = requests.get(pr['commits_url'], headers=h)
    r.raise_for_status()
    commits = r.json()
    authors = []
    for commit in commits:
        author = commit['commit']['author']
        authors.append("%s <%s>" % (author['name'], author['email']))
    return authors

# encode_multipart_formdata is from urllib3.filepost
# The only change is to iter_fields, to enforce S3's required key ordering

def iter_fields(fields):
    fields = fields.copy()
    for key in ('key', 'acl', 'Filename', 'success_action_status', 'AWSAccessKeyId',
                'Policy', 'Signature', 'Content-Type', 'file'):
        yield (key, fields.pop(key))
    for (k,v) in fields.items():
        yield k,v

def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data mime format.

    :param fields:
        Dictionary of fields or list of (key, value) field tuples. The key is
        treated as the field name, and the value as the body of the form-data
        bytes. If the value is a tuple of two elements, then the first element
        is treated as the filename of the form-data section.

        Field names and filenames must be unicode.

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    # copy requests imports in here:
    from io import BytesIO
    from requests.packages.urllib3.filepost import (
        choose_boundary, six, writer, b, get_content_type
    )
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    for fieldname, value in iter_fields(fields):
        body.write(b('--%s\r\n' % (boundary)))

        if isinstance(value, tuple):
            filename, data = value
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b('Content-Type: %s\r\n\r\n' %
                         (get_content_type(filename))))
        else:
            data = value
            writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
                               % (fieldname))
            body.write(b'Content-Type: text/plain\r\n\r\n')

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility
        if isinstance(data, six.text_type):
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b'\r\n')

    body.write(b('--%s--\r\n' % (boundary)))

    content_type = b('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type


def post_download(project, filename, name=None, description=""):
    """Upload a file to the GitHub downloads area"""
    if name is None:
        name = os.path.basename(filename)
    with open(filename, 'rb') as f:
        filedata = f.read()

    url = "https://api.github.com/repos/{project}/downloads".format(project=project)

    payload = json.dumps(dict(name=name, size=len(filedata),
                              description=description))
    response = requests.post(url, data=payload, headers=make_auth_header())
    response.raise_for_status()
    reply = json.loads(response.content)
    s3_url = reply['s3_url']

    fields = dict(
        key=reply['path'],
        acl=reply['acl'],
        success_action_status=201,
        Filename=reply['name'],
        AWSAccessKeyId=reply['accesskeyid'],
        Policy=reply['policy'],
        Signature=reply['signature'],
        file=(reply['name'], filedata),
    )
    fields['Content-Type'] = reply['mime_type']
    data, content_type = encode_multipart_formdata(fields)
    s3r = requests.post(s3_url, data=data, headers={'Content-Type': content_type})
    return s3r
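
For orientation, the helpers in this hunk (the module is imported as gh_api by the stats script in the next hunk) compose directly. A minimal sketch, under the assumptions that the file is importable as gh_api and that the PR number 1 is a placeholder; unauthenticated calls work but are rate-limited:

    # Sketch only: inspect a single pull request using the helpers above.
    # `num=1` is a placeholder; substitute a real PR number.
    import gh_api

    def summarize_pull(project="ipython/ipython", num=1):
        pr = gh_api.get_pull_request(project, num)           # Obj wrapper gives attribute access
        files = gh_api.get_pull_request_files(project, num)  # paged list of file dicts
        print("#%d %s by %s" % (pr.number, pr.title, pr.user.login))
        for f in files:
            print("  %s (+%d/-%d)" % (f['filename'], f['additions'], f['deletions']))

    if __name__ == "__main__":
        summarize_pull()
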
@@ -1,232 +1,231 @@
#!/usr/bin/env python
"""Simple tools to query github.com and gather stats about issues.

To generate a report for IPython 2.0, run:

    python github_stats.py --milestone 2.0 --since-tag rel-1.0.0
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

from __future__ import print_function

import codecs
import sys

from argparse import ArgumentParser
from datetime import datetime, timedelta
from subprocess import check_output

from gh_api import (
    get_paged_request, make_auth_header, get_pull_request, is_pull_request,
    get_milestone_id, get_issues_list, get_authors,
)
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------

ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
PER_PAGE = 100

#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------

def round_hour(dt):
    return dt.replace(minute=0,second=0,microsecond=0)

def _parse_datetime(s):
    """Parse dates in the format returned by the Github API."""
    if s:
        return datetime.strptime(s, ISO8601)
    else:
        return datetime.fromtimestamp(0)

def issues2dict(issues):
    """Convert a list of issues to a dict, keyed by issue number."""
    idict = {}
    for i in issues:
        idict[i['number']] = i
    return idict

def split_pulls(all_issues, project="ipython/ipython"):
    """split a list of closed issues into non-PR Issues and Pull Requests"""
    pulls = []
    issues = []
    for i in all_issues:
        if is_pull_request(i):
            pull = get_pull_request(project, i['number'], auth=True)
            pulls.append(pull)
        else:
            issues.append(i)
    return issues, pulls


def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
    """Get all issues closed since a particular point in time. period
    can either be a datetime object, or a timedelta object. In the
    latter case, it is used as a time before the present.
    """

    which = 'pulls' if pulls else 'issues'

    if isinstance(period, timedelta):
        since = round_hour(datetime.utcnow() - period)
    else:
        since = period
    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
    allclosed = get_paged_request(url, headers=make_auth_header())

    filtered = [ i for i in allclosed if _parse_datetime(i['closed_at']) > since ]
    if pulls:
        filtered = [ i for i in filtered if _parse_datetime(i['merged_at']) > since ]
        # filter out PRs not against master (backports)
        filtered = [ i for i in filtered if i['base']['ref'] == 'master' ]
    else:
        filtered = [ i for i in filtered if not is_pull_request(i) ]

    return filtered

|
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return a list of issues sorted by closing date."""
    return sorted(issues, key = lambda i:i[field], reverse=reverse)


def report(issues, show_urls=False):
    """Summary report about a list of issues, printing number and title."""
    if show_urls:
        for i in issues:
            role = 'ghpull' if 'merged_at' in i else 'ghissue'
            print(u'* :%s:`%d`: %s' % (role, i['number'],
                                       i['title'].replace(u'`', u'``')))
    else:
        for i in issues:
            print(u'* %d: %s' % (i['number'], i['title'].replace(u'`', u'``')))

#-----------------------------------------------------------------------------
# Main script
#-----------------------------------------------------------------------------

if __name__ == "__main__":
    # deal with unicode
    if sys.version_info < (3,):
        sys.stdout = codecs.getwriter('utf8')(sys.stdout)

    # Whether to add reST urls for all issues in printout.
    show_urls = True

    parser = ArgumentParser()
    parser.add_argument('--since-tag', type=str,
        help="The git tag to use for the starting point (typically the last major release)."
    )
    parser.add_argument('--milestone', type=str,
        help="The GitHub milestone to use for filtering issues [optional]."
    )
    parser.add_argument('--days', type=int,
        help="The number of days of data to summarize (use this or --since-tag)."
    )
    parser.add_argument('--project', type=str, default="ipython/ipython",
        help="The project to summarize."
    )
    parser.add_argument('--links', action='store_true', default=False,
        help="Include links to all closed Issues and PRs in the output."
    )

    opts = parser.parse_args()
    tag = opts.since_tag

    # set `since` from days or git tag
    if opts.days:
        since = datetime.utcnow() - timedelta(days=opts.days)
    else:
        if not tag:
            tag = check_output(['git', 'describe', '--abbrev=0']).strip().decode('utf8')
        cmd = ['git', 'log', '-1', '--format=%ai', tag]
        tagday, tz = check_output(cmd).strip().decode('utf8').rsplit(' ', 1)
        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
        h = int(tz[1:3])
        m = int(tz[3:])
        td = timedelta(hours=h, minutes=m)
        if tz[0] == '-':
            since += td
        else:
            since -= td

    since = round_hour(since)

    milestone = opts.milestone
    project = opts.project

    print("fetching GitHub stats since %s (tag: %s, milestone: %s)" % (since, tag, milestone), file=sys.stderr)
    if milestone:
        milestone_id = get_milestone_id(project=project, milestone=milestone,
                                        auth=True)
        issues_and_pulls = get_issues_list(project=project,
                                           milestone=milestone_id,
                                           state='closed',
                                           auth=True,
        )
        issues, pulls = split_pulls(issues_and_pulls)
    else:
        issues = issues_closed_since(since, project=project, pulls=False)
        pulls = issues_closed_since(since, project=project, pulls=True)

    # For regular reports, it's nice to show them in reverse chronological order
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)

    n_issues, n_pulls = map(len, (issues, pulls))
    n_total = n_issues + n_pulls

    # Print summary report we can directly include into release notes.

    print()
    since_day = since.strftime("%Y/%m/%d")
    today = datetime.today().strftime("%Y/%m/%d")
    print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
    print()
    print("These lists are automatically generated, and may be incomplete or contain duplicates.")
    print()

    ncommits = 0
    all_authors = []
    if tag:
        # print git info, in addition to GitHub info:
        since_tag = tag+'..'
        cmd = ['git', 'log', '--oneline', since_tag]
        ncommits += len(check_output(cmd).splitlines())

        author_cmd = ['git', 'log', '--use-mailmap', "--format=* %aN", since_tag]
        all_authors.extend(check_output(author_cmd).decode('utf-8', 'replace').splitlines())

    pr_authors = []
    for pr in pulls:
        pr_authors.extend(get_authors(pr))
    ncommits = len(pr_authors) + ncommits - len(pulls)
    author_cmd = ['git', 'check-mailmap'] + pr_authors
    with_email = check_output(author_cmd).decode('utf-8', 'replace').splitlines()
    all_authors.extend([ u'* ' + a.split(' <')[0] for a in with_email ])
    unique_authors = sorted(set(all_authors), key=lambda s: s.lower())

    print("We closed %d issues and merged %d pull requests." % (n_issues, n_pulls))
    if milestone:
-        print("The full list can be seen `on GitHub <https://github.com/
-              % (project, milestone)
+        print("The full list can be seen `on GitHub <https://github.com/{project}/issues?q=milestone%3A{milestone}+>`__".format(project=project,milestone=milestone)
        )

    print()
    print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
    print()
    print('\n'.join(unique_authors))

    if opts.links:
        print()
        print("GitHub issues and pull requests:")
        print()
        print('Pull Requests (%d):\n' % n_pulls)
        report(pulls, show_urls)
        print()
        print('Issues (%d):\n' % n_issues)
        report(issues, show_urls)
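
One subtle step above is turning the tag's `git log -1 --format=%ai` output (a local timestamp plus a UTC offset) into the UTC `since` value: a positive offset is subtracted, a negative one added. A standalone sketch of that arithmetic, using a made-up tag timestamp purely for illustration:

    from datetime import datetime, timedelta

    def tag_time_to_utc(tagday, tz):
        # Mirrors the offset handling in the script: local = UTC + offset,
        # so UTC = local - offset (i.e. add the magnitude of a '-' offset).
        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
        td = timedelta(hours=int(tz[1:3]), minutes=int(tz[3:]))
        return since + td if tz[0] == '-' else since - td

    # Hypothetical example: a tag cut at 17:55 in UTC-5 corresponds to 22:55 UTC.
    print(tag_time_to_utc("2013-08-08 17:55:00", "-0500"))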