cleanup unused tools...
Matthias Bussonnier
NO CONTENT: modified file chmod 100755 => 100644
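A note for readers skimming the summary lines: "modified file chmod 100755 => 100644" means the commit only dropped the executable bit on those scripts; their blob contents are unchanged at that point. A minimal sketch of how to verify the mode locally, assuming a checkout with this commit applied (the path below is a guess, since the viewer does not show which file each summary line belongs to):

import os
import stat

# Hypothetical path; substitute any of the tools/ scripts touched by this commit.
mode = os.stat("tools/github_stats.py").st_mode
print(oct(mode & 0o777))           # expect 0o644 after this commit, 0o755 before it
print(bool(mode & stat.S_IXUSR))   # False once the executable bit has been removed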
@@ -1,304 +1,163 b''
 """Functions for Github API requests."""
 
 try:
     input = raw_input
 except NameError:
     pass
 
-import os
 import re
 import sys
 
 import requests
 import getpass
 import json
-from pathlib import Path
 
 try:
     import requests_cache
 except ImportError:
     print("cache not available, install `requests_cache` for caching.", file=sys.stderr)
 else:
     requests_cache.install_cache("gh_api", expire_after=3600)
 
 # Keyring stores passwords by a 'username', but we're not storing a username and
 # password
 import socket
 fake_username = 'ipython_tools_%s' % socket.gethostname().replace('.','_').replace('-','_')
 
 class Obj(dict):
     """Dictionary with attribute access to names."""
     def __getattr__(self, name):
         try:
             return self[name]
         except KeyError as e:
             raise AttributeError(name) from e
 
     def __setattr__(self, name, val):
         self[name] = val
 
 token = None
 def get_auth_token():
     global token
 
     if token is not None:
         return token
 
     import keyring
     token = keyring.get_password('github', fake_username)
     if token is not None:
         return token
 
     print("Please enter your github username and password. These are not "
           "stored, only used to get an oAuth token. You can revoke this at "
           "any time on Github.\n"
           "Username: ", file=sys.stderr, end='')
     user = input('')
     pw = getpass.getpass("Password: ", stream=sys.stderr)
 
     auth_request = {
       "scopes": [
         "public_repo",
         "gist"
       ],
       "note": "IPython tools %s" % socket.gethostname(),
       "note_url": "https://github.com/ipython/ipython/tree/master/tools",
     }
     response = requests.post('https://api.github.com/authorizations',
                              auth=(user, pw), data=json.dumps(auth_request))
     if response.status_code == 401 and \
             'required;' in response.headers.get('X-GitHub-OTP', ''):
         print("Your login API requested a one time password", file=sys.stderr)
         otp = getpass.getpass("One Time Password: ", stream=sys.stderr)
         response = requests.post('https://api.github.com/authorizations',
                                  auth=(user, pw),
                                  data=json.dumps(auth_request),
                                  headers={'X-GitHub-OTP':otp})
     response.raise_for_status()
     token = json.loads(response.text)['token']
     keyring.set_password('github', fake_username, token)
     return token
 
 def make_auth_header():
     return {'Authorization': 'token ' + get_auth_token()}
 
-def post_issue_comment(project, num, body):
-    url = 'https://api.github.com/repos/{project}/issues/{num}/comments'.format(project=project, num=num)
-    payload = json.dumps({'body': body})
-    requests.post(url, data=payload, headers=make_auth_header())
-
-def post_gist(content, description='', filename='file', auth=False):
-    """Post some text to a Gist, and return the URL."""
-    post_data = json.dumps({
-      "description": description,
-      "public": True,
-      "files": {
-        filename: {
-          "content": content
-        }
-      }
-    }).encode('utf-8')
-
-    headers = make_auth_header() if auth else {}
-    response = requests.post("https://api.github.com/gists", data=post_data, headers=headers)
-    response.raise_for_status()
-    response_data = json.loads(response.text)
-    return response_data['html_url']
 
 def get_pull_request(project, num, auth=False):
     """get pull request info by number
     """
     url = "https://api.github.com/repos/{project}/pulls/{num}".format(project=project, num=num)
     if auth:
         header = make_auth_header()
     else:
         header = None
     print("fetching %s" % url, file=sys.stderr)
     response = requests.get(url, headers=header)
     response.raise_for_status()
     return json.loads(response.text, object_hook=Obj)
 
-def get_pull_request_files(project, num, auth=False):
-    """get list of files in a pull request"""
-    url = "https://api.github.com/repos/{project}/pulls/{num}/files".format(project=project, num=num)
-    if auth:
-        header = make_auth_header()
-    else:
-        header = None
-    return get_paged_request(url, headers=header)
-
 element_pat = re.compile(r'<(.+?)>')
 rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
 
 def get_paged_request(url, headers=None, **params):
     """get a full list, handling APIv3's paging"""
     results = []
     params.setdefault("per_page", 100)
     while True:
         if '?' in url:
             params = None
             print("fetching %s" % url, file=sys.stderr)
         else:
             print("fetching %s with %s" % (url, params), file=sys.stderr)
         response = requests.get(url, headers=headers, params=params)
         response.raise_for_status()
         results.extend(response.json())
         if 'next' in response.links:
             url = response.links['next']['url']
         else:
             break
     return results
 
-def get_pulls_list(project, auth=False, **params):
-    """get pull request list"""
-    params.setdefault("state", "closed")
-    url = "https://api.github.com/repos/{project}/pulls".format(project=project)
-    if auth:
-        headers = make_auth_header()
-    else:
-        headers = None
-    pages = get_paged_request(url, headers=headers, **params)
-    return pages
-
 def get_issues_list(project, auth=False, **params):
     """get issues list"""
     params.setdefault("state", "closed")
     url = "https://api.github.com/repos/{project}/issues".format(project=project)
     if auth:
         headers = make_auth_header()
     else:
         headers = None
     pages = get_paged_request(url, headers=headers, **params)
     return pages
 
 def get_milestones(project, auth=False, **params):
     params.setdefault('state', 'all')
     url = "https://api.github.com/repos/{project}/milestones".format(project=project)
     if auth:
         headers = make_auth_header()
     else:
         headers = None
     milestones = get_paged_request(url, headers=headers, **params)
     return milestones
 
 def get_milestone_id(project, milestone, auth=False, **params):
     milestones = get_milestones(project, auth=auth, **params)
     for mstone in milestones:
         if mstone['title'] == milestone:
             return mstone['number']
     else:
         raise ValueError("milestone %s not found" % milestone)
 
 def is_pull_request(issue):
     """Return True if the given issue is a pull request."""
     return bool(issue.get('pull_request', {}).get('html_url', None))
 
 def get_authors(pr):
     print("getting authors for #%i" % pr['number'], file=sys.stderr)
     h = make_auth_header()
     r = requests.get(pr['commits_url'], headers=h)
     r.raise_for_status()
     commits = r.json()
     authors = []
     for commit in commits:
         author = commit['commit']['author']
         authors.append("%s <%s>" % (author['name'], author['email']))
     return authors
 
-# encode_multipart_formdata is from urllib3.filepost
-# The only change is to iter_fields, to enforce S3's required key ordering
-
-def iter_fields(fields):
-    fields = fields.copy()
-    for key in ('key', 'acl', 'Filename', 'success_action_status', 'AWSAccessKeyId',
-                'Policy', 'Signature', 'Content-Type', 'file'):
-        yield (key, fields.pop(key))
-    for (k,v) in fields.items():
-        yield k,v
-
-def encode_multipart_formdata(fields, boundary=None):
-    """
-    Encode a dictionary of ``fields`` using the multipart/form-data mime format.
-
-    :param fields:
-        Dictionary of fields or list of (key, value) field tuples. The key is
-        treated as the field name, and the value as the body of the form-data
-        bytes. If the value is a tuple of two elements, then the first element
-        is treated as the filename of the form-data section.
-
-        Field names and filenames must be unicode.
-
-    :param boundary:
-        If not specified, then a random boundary will be generated using
-        :func:`mimetools.choose_boundary`.
-    """
-    # copy requests imports in here:
-    from io import BytesIO
-    from requests.packages.urllib3.filepost import (
-        choose_boundary, six, writer, b, get_content_type
-    )
-    body = BytesIO()
-    if boundary is None:
-        boundary = choose_boundary()
-
-    for fieldname, value in iter_fields(fields):
-        body.write(b('--%s\r\n' % (boundary)))
-
-        if isinstance(value, tuple):
-            filename, data = value
-            writer(body).write('Content-Disposition: form-data; name="%s"; '
-                               'filename="%s"\r\n' % (fieldname, filename))
-            body.write(b('Content-Type: %s\r\n\r\n' %
-                       (get_content_type(filename))))
-        else:
-            data = value
-            writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
-                               % (fieldname))
-            body.write(b'Content-Type: text/plain\r\n\r\n')
-
-        if isinstance(data, int):
-            data = str(data) # Backwards compatibility
-        if isinstance(data, six.text_type):
-            writer(body).write(data)
-        else:
-            body.write(data)
-
-        body.write(b'\r\n')
-
-    body.write(b('--%s--\r\n' % (boundary)))
-
-    content_type = b('multipart/form-data; boundary=%s' % boundary)
-
-    return body.getvalue(), content_type
-
-
-def post_download(project, filename, name=None, description=""):
-    """Upload a file to the GitHub downloads area"""
-    if name is None:
-        name = Path(filename).name
-    with open(filename, 'rb') as f:
-        filedata = f.read()
-
-    url = "https://api.github.com/repos/{project}/downloads".format(project=project)
-
-    payload = json.dumps(dict(name=name, size=len(filedata),
-                              description=description))
-    response = requests.post(url, data=payload, headers=make_auth_header())
-    response.raise_for_status()
-    reply = json.loads(response.content)
-    s3_url = reply['s3_url']
-
-    fields = dict(
-        key=reply['path'],
-        acl=reply['acl'],
-        success_action_status=201,
-        Filename=reply['name'],
-        AWSAccessKeyId=reply['accesskeyid'],
-        Policy=reply['policy'],
-        Signature=reply['signature'],
-        file=(reply['name'], filedata),
-    )
-    fields['Content-Type'] = reply['mime_type']
-    data, content_type = encode_multipart_formdata(fields)
-    s3r = requests.post(s3_url, data=data, headers={'Content-Type': content_type})
-    return s3r
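The hunk above strips the Github API helper module (imported as gh_api by the stats script in the next hunk) down to the pieces that are still used: token handling, paged requests, issue/milestone queries, and PR author lookup. The gist, issue-comment, PR file listing, pulls listing, and old S3 "downloads" upload helpers are deleted. A quick reviewer-side check that none of the deleted names are still referenced, written as a small sketch rather than anything shipped with the commit (run it from a local checkout with the commit applied):

import subprocess

removed = [
    "post_issue_comment", "post_gist", "get_pull_request_files",
    "get_pulls_list", "iter_fields", "encode_multipart_formdata",
    "post_download",
]

for name in removed:
    # `git grep` exits with a non-zero status when a pattern has no matches,
    # which is the desired outcome for every name in the list.
    proc = subprocess.run(["git", "grep", "-n", name],
                          capture_output=True, text=True)
    status = "still referenced" if proc.returncode == 0 else "no remaining callers"
    print("%s: %s" % (name, status))
    if proc.stdout:
        print(proc.stdout, end="")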
@@ -1,231 +1,230 b''
 #!/usr/bin/env python
 """Simple tools to query github.com and gather stats about issues.
 
 To generate a report for IPython 2.0, run:
 
     python github_stats.py --milestone 2.0 --since-tag rel-1.0.0
 """
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 
 
-import codecs
 import sys
 
 from argparse import ArgumentParser
 from datetime import datetime, timedelta
 from subprocess import check_output
 
 from gh_api import (
     get_paged_request, make_auth_header, get_pull_request, is_pull_request,
     get_milestone_id, get_issues_list, get_authors,
 )
 #-----------------------------------------------------------------------------
 # Globals
 #-----------------------------------------------------------------------------
 
 ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
 PER_PAGE = 100
 
 #-----------------------------------------------------------------------------
 # Functions
 #-----------------------------------------------------------------------------
 
 def round_hour(dt):
     return dt.replace(minute=0,second=0,microsecond=0)
 
 def _parse_datetime(s):
     """Parse dates in the format returned by the Github API."""
     if s:
         return datetime.strptime(s, ISO8601)
     else:
         return datetime.fromtimestamp(0)
 
 def issues2dict(issues):
     """Convert a list of issues to a dict, keyed by issue number."""
     idict = {}
     for i in issues:
         idict[i['number']] = i
     return idict
 
 def split_pulls(all_issues, project="ipython/ipython"):
     """split a list of closed issues into non-PR Issues and Pull Requests"""
     pulls = []
     issues = []
     for i in all_issues:
         if is_pull_request(i):
             pull = get_pull_request(project, i['number'], auth=True)
             pulls.append(pull)
         else:
             issues.append(i)
     return issues, pulls
 
 
 def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
     """Get all issues closed since a particular point in time. period
     can either be a datetime object, or a timedelta object. In the
     latter case, it is used as a time before the present.
     """
 
     which = 'pulls' if pulls else 'issues'
 
     if isinstance(period, timedelta):
         since = round_hour(datetime.utcnow() - period)
     else:
         since = period
     url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
     allclosed = get_paged_request(url, headers=make_auth_header())
 
     filtered = [ i for i in allclosed if _parse_datetime(i['closed_at']) > since ]
     if pulls:
         filtered = [ i for i in filtered if _parse_datetime(i['merged_at']) > since ]
         # filter out PRs not against master (backports)
         filtered = [ i for i in filtered if i['base']['ref'] == 'master' ]
     else:
         filtered = [ i for i in filtered if not is_pull_request(i) ]
 
     return filtered
 
 
 def sorted_by_field(issues, field='closed_at', reverse=False):
     """Return a list of issues sorted by closing date date."""
     return sorted(issues, key = lambda i:i[field], reverse=reverse)
 
 
 def report(issues, show_urls=False):
     """Summary report about a list of issues, printing number and title."""
     if show_urls:
         for i in issues:
             role = 'ghpull' if 'merged_at' in i else 'ghissue'
             print(u'* :%s:`%d`: %s' % (role, i['number'],
                                        i['title'].replace(u'`', u'``')))
     else:
         for i in issues:
             print(u'* %d: %s' % (i['number'], i['title'].replace(u'`', u'``')))
 
 #-----------------------------------------------------------------------------
 # Main script
 #-----------------------------------------------------------------------------
 
 if __name__ == "__main__":
 
     print("DEPRECATE: backport_pr.py is deprecated and it is now recommended"
           "to install `ghpro` from PyPI.", file=sys.stderr)
 
 
     # Whether to add reST urls for all issues in printout.
     show_urls = True
 
     parser = ArgumentParser()
     parser.add_argument('--since-tag', type=str,
         help="The git tag to use for the starting point (typically the last major release)."
     )
     parser.add_argument('--milestone', type=str,
         help="The GitHub milestone to use for filtering issues [optional]."
     )
     parser.add_argument('--days', type=int,
         help="The number of days of data to summarize (use this or --since-tag)."
     )
     parser.add_argument('--project', type=str, default="ipython/ipython",
         help="The project to summarize."
     )
     parser.add_argument('--links', action='store_true', default=False,
         help="Include links to all closed Issues and PRs in the output."
     )
 
     opts = parser.parse_args()
     tag = opts.since_tag
 
     # set `since` from days or git tag
     if opts.days:
         since = datetime.utcnow() - timedelta(days=opts.days)
     else:
         if not tag:
             tag = check_output(['git', 'describe', '--abbrev=0']).strip().decode('utf8')
         cmd = ['git', 'log', '-1', '--format=%ai', tag]
         tagday, tz = check_output(cmd).strip().decode('utf8').rsplit(' ', 1)
         since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
         h = int(tz[1:3])
         m = int(tz[3:])
         td = timedelta(hours=h, minutes=m)
         if tz[0] == '-':
             since += td
         else:
             since -= td
 
     since = round_hour(since)
 
     milestone = opts.milestone
     project = opts.project
 
     print("fetching GitHub stats since %s (tag: %s, milestone: %s)" % (since, tag, milestone), file=sys.stderr)
     if milestone:
         milestone_id = get_milestone_id(project=project, milestone=milestone,
                                         auth=True)
         issues_and_pulls = get_issues_list(project=project,
                                            milestone=milestone_id,
                                            state='closed',
                                            auth=True,
                                            )
         issues, pulls = split_pulls(issues_and_pulls, project=project)
     else:
         issues = issues_closed_since(since, project=project, pulls=False)
         pulls = issues_closed_since(since, project=project, pulls=True)
 
     # For regular reports, it's nice to show them in reverse chronological order
     issues = sorted_by_field(issues, reverse=True)
     pulls = sorted_by_field(pulls, reverse=True)
 
     n_issues, n_pulls = map(len, (issues, pulls))
     n_total = n_issues + n_pulls
 
     # Print summary report we can directly include into release notes.
 
     print()
     since_day = since.strftime("%Y/%m/%d")
     today = datetime.today().strftime("%Y/%m/%d")
     print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
     print()
     print("These lists are automatically generated, and may be incomplete or contain duplicates.")
     print()
 
     ncommits = 0
     all_authors = []
     if tag:
         # print git info, in addition to GitHub info:
         since_tag = tag+'..'
         cmd = ['git', 'log', '--oneline', since_tag]
         ncommits += len(check_output(cmd).splitlines())
 
         author_cmd = ['git', 'log', '--use-mailmap', "--format=* %aN", since_tag]
         all_authors.extend(check_output(author_cmd).decode('utf-8', 'replace').splitlines())
 
     pr_authors = []
     for pr in pulls:
         pr_authors.extend(get_authors(pr))
     ncommits = len(pr_authors) + ncommits - len(pulls)
     author_cmd = ['git', 'check-mailmap'] + pr_authors
     with_email = check_output(author_cmd).decode('utf-8', 'replace').splitlines()
     all_authors.extend([ u'* ' + a.split(' <')[0] for a in with_email ])
     unique_authors = sorted(set(all_authors), key=lambda s: s.lower())
 
     print("We closed %d issues and merged %d pull requests." % (n_issues, n_pulls))
     if milestone:
         print("The full list can be seen `on GitHub <https://github.com/{project}/issues?q=milestone%3A{milestone}>`__".format(project=project,milestone=milestone)
         )
 
     print()
     print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
     print()
     print('\n'.join(unique_authors))
 
     if opts.links:
         print()
         print("GitHub issues and pull requests:")
         print()
         print('Pull Requests (%d):\n' % n_pulls)
         report(pulls, show_urls)
         print()
         print('Issues (%d):\n' % n_issues)
         report(issues, show_urls)
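The only change to github_stats.py above is dropping import codecs, which nothing in the file uses; all byte decoding already goes through .decode('utf8') / .decode('utf-8', 'replace') on the check_output results. Separately, because the tag-timestamp handling near the end of the hunk is easy to misread, here is a small illustrative sketch of what it computes (the date string is invented for the example):

from datetime import datetime, timedelta

# `git log -1 --format=%ai <tag>` prints local time plus a UTC offset,
# e.g. "2013-08-08 10:35:42 -0700" (made-up value, not taken from the diff).
tagday, tz = "2013-08-08 10:35:42 -0700".rsplit(' ', 1)
since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
offset = timedelta(hours=int(tz[1:3]), minutes=int(tz[3:]))
# A negative offset means the local clock is behind UTC, so add it back;
# a positive offset is subtracted. The result is the tag time expressed in UTC.
since = since + offset if tz[0] == '-' else since - offset
print(since)   # 2013-08-08 17:35:42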
NO CONTENT: modified file chmod 100755 => 100644
@@ -1,92 +1,85 b''
 #!/usr/bin/env python3
 """IPython release script.
 
 This should ONLY be run at real release time.
 """
 from __future__ import print_function
 
 import os
 from glob import glob
 from subprocess import call
 import sys
 
 from toollib import (get_ipdir, pjoin, cd, execfile, sh, archive,
                      archive_user, archive_dir)
-from gh_api import post_download
 
 # Get main ipython dir, this will raise if it doesn't pass some checks
 ipdir = get_ipdir()
 tooldir = pjoin(ipdir, 'tools')
 distdir = pjoin(ipdir, 'dist')
 
 # Where I keep static backups of each release
 ipbackupdir = os.path.expanduser('~/ipython/backup')
 if not os.path.exists(ipbackupdir):
     os.makedirs(ipbackupdir)
 
 # Start in main IPython dir
 cd(ipdir)
 
 # Load release info
 version = None
 execfile(pjoin('IPython','core','release.py'), globals())
 
 # Build site addresses for file uploads
 release_site = '%s/release/%s' % (archive, version)
 backup_site = '%s/backup/' % archive
 
 # Start actual release process
 print()
 print('Releasing IPython')
 print('=================')
 print()
 print('Version:', version)
 print()
 print('Source IPython directory:', ipdir)
 print()
 
 # Perform local backup, go to tools dir to run it.
 cd(tooldir)
 
 if 'upload' in sys.argv:
     cd(distdir)
 
     # do not upload OS specific files like .DS_Store
     to_upload = glob('*.whl')+glob('*.tar.gz')
-    for fname in to_upload:
-        # TODO: update to GitHub releases API
-        continue
-        print('uploading %s to GitHub' % fname)
-        desc = "IPython %s source distribution" % version
-        post_download("ipython/ipython", fname, description=desc)
 
     # Make target dir if it doesn't exist
     print('1. Uploading IPython to archive.ipython.org')
     sh('ssh %s "mkdir -p %s/release/%s" ' % (archive_user, archive_dir, version))
     sh('scp *.tar.gz *.tar.xz *.whl %s' % release_site)
 
     print('2. Uploading backup files...')
     cd(ipbackupdir)
     sh('scp `ls -1tr *tgz | tail -1` %s' % backup_site)
 
     print('3. Uploading to PyPI using twine')
     cd(distdir)
     call(['twine', 'upload', '--verbose'] + to_upload)
 
 else:
     # Build, but don't upload
 
     # Make backup tarball
     sh('./make_tarball.py')
     sh('mv ipython-*.tgz %s' % ipbackupdir)
 
     # Build release files
     sh('./build_release')
 
     cd(ipdir)
 
     print("`./release upload` to upload source distribution on PyPI and ipython archive")
     sys.exit(0)
 
 
 
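In the release script above, the removed import and loop were already dead code: the continue at the top of the loop body runs before any of the upload statements, so post_download was never called from the upload path. A stripped-down illustration of why deleting the block changes no behaviour (the file names are invented):

to_upload = ['ipython-0.0.0.tar.gz', 'ipython-0.0.0-py3-none-any.whl']  # invented names

for fname in to_upload:
    # TODO: update to GitHub releases API  (the whole block was parked behind this TODO)
    continue
    print('uploading %s to GitHub' % fname)   # unreachable
    # post_download(...) below it was likewise never reached

print('the loop uploads nothing; the scp and twine steps that follow do the real work')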
NO CONTENT: modified file chmod 100755 => 100644
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed