##// END OF EJS Templates
cleanup unused tools...
Matthias Bussonnier -
Show More
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,304 +1,163 b''
1 1 """Functions for Github API requests."""
2 2
3 3 try:
4 4 input = raw_input
5 5 except NameError:
6 6 pass
7 7
8 import os
9 8 import re
10 9 import sys
11 10
12 11 import requests
13 12 import getpass
14 13 import json
15 from pathlib import Path
16 14
17 15 try:
18 16 import requests_cache
19 17 except ImportError:
20 18 print("cache not available, install `requests_cache` for caching.", file=sys.stderr)
21 19 else:
22 20 requests_cache.install_cache("gh_api", expire_after=3600)
23 21
24 22 # Keyring stores passwords by a 'username', but we're not storing a username and
25 23 # password
26 24 import socket
27 25 fake_username = 'ipython_tools_%s' % socket.gethostname().replace('.','_').replace('-','_')
28 26
class Obj(dict):
    """Dictionary subclass whose keys are also readable/writable as attributes."""

    def __getattr__(self, name):
        # Missing keys must surface as AttributeError so getattr()/hasattr()
        # behave as callers expect on attribute access.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, val):
        # Attribute assignment writes straight into the dict storage.
        self[name] = val
39 37
token = None

def get_auth_token():
    """Return a GitHub OAuth token for this machine, creating one if needed.

    The token is cached in-process (module global ``token``) and persistently
    via keyring; on a cache miss the user is prompted for credentials, which
    are exchanged for a token through the GitHub authorizations API and never
    stored themselves.
    """
    global token
    if token is not None:
        return token

    import keyring
    token = keyring.get_password('github', fake_username)
    if token is not None:
        return token

    print("Please enter your github username and password. These are not "
          "stored, only used to get an oAuth token. You can revoke this at "
          "any time on Github.\n"
          "Username: ", file=sys.stderr, end='')
    user = input('')
    pw = getpass.getpass("Password: ", stream=sys.stderr)

    payload = json.dumps({
        "scopes": [
            "public_repo",
            "gist"
        ],
        "note": "IPython tools %s" % socket.gethostname(),
        "note_url": "https://github.com/ipython/ipython/tree/master/tools",
    })
    response = requests.post('https://api.github.com/authorizations',
                             auth=(user, pw), data=payload)
    # GitHub signals two-factor accounts with a 401 plus an X-GitHub-OTP
    # header containing "required"; retry exactly once with a one-time password.
    if response.status_code == 401 and \
            'required;' in response.headers.get('X-GitHub-OTP', ''):
        print("Your login API requested a one time password", file=sys.stderr)
        otp = getpass.getpass("One Time Password: ", stream=sys.stderr)
        response = requests.post('https://api.github.com/authorizations',
                                 auth=(user, pw),
                                 data=payload,
                                 headers={'X-GitHub-OTP': otp})
    response.raise_for_status()
    token = json.loads(response.text)['token']
    # Persist for future runs before returning.
    keyring.set_password('github', fake_username, token)
    return token
81 79
def make_auth_header():
    """Build the ``Authorization`` header dict for token-authenticated requests."""
    return {'Authorization': 'token ' + get_auth_token()}
84 82
def post_issue_comment(project, num, body):
    """Post *body* as a comment on issue/PR *num* of *project* (e.g. 'ipython/ipython')."""
    url = 'https://api.github.com/repos/{project}/issues/{num}/comments'.format(project=project, num=num)
    payload = json.dumps({'body': body})
    response = requests.post(url, data=payload, headers=make_auth_header())
    # Previously the response was discarded, so a failed post (bad auth,
    # missing issue, rate limit) went unnoticed. Surface HTTP errors, as the
    # other request helpers in this module do.
    response.raise_for_status()
89
def post_gist(content, description='', filename='file', auth=False):
    """Post some text to a Gist, and return the URL."""
    gist = {
        "description": description,
        "public": True,
        "files": {filename: {"content": content}},
    }
    # Anonymous gists are allowed; pass auth=True to attach to the user.
    headers = make_auth_header() if auth else {}
    response = requests.post("https://api.github.com/gists",
                             data=json.dumps(gist).encode('utf-8'),
                             headers=headers)
    response.raise_for_status()
    return json.loads(response.text)['html_url']
107 83
def get_pull_request(project, num, auth=False):
    """Fetch pull request *num* of *project*; result supports attribute access (Obj)."""
    url = "https://api.github.com/repos/{project}/pulls/{num}".format(project=project, num=num)
    header = make_auth_header() if auth else None
    print("fetching %s" % url, file=sys.stderr)
    response = requests.get(url, headers=header)
    response.raise_for_status()
    # object_hook wraps every JSON object in Obj for attribute-style access.
    return json.loads(response.text, object_hook=Obj)
120 96
def get_pull_request_files(project, num, auth=False):
    """Return the (paged) list of files touched by pull request *num* of *project*."""
    url = "https://api.github.com/repos/{project}/pulls/{num}/files".format(project=project, num=num)
    header = make_auth_header() if auth else None
    return get_paged_request(url, headers=header)
129
130 97 element_pat = re.compile(r'<(.+?)>')
131 98 rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
132 99
def get_paged_request(url, headers=None, **params):
    """Fetch *url* repeatedly, following API v3 'next' links, and return all results."""
    results = []
    params.setdefault("per_page", 100)
    while True:
        # 'next' links already embed the full query string; sending params
        # again would override the pagination cursor, so drop them as soon as
        # the url carries a query of its own.
        if '?' in url:
            params = None
            print("fetching %s" % url, file=sys.stderr)
        else:
            print("fetching %s with %s" % (url, params), file=sys.stderr)
        response = requests.get(url, headers=headers, params=params)
        response.raise_for_status()
        results.extend(response.json())
        next_link = response.links.get('next')
        if next_link is None:
            return results
        url = next_link['url']
151 118
def get_pulls_list(project, auth=False, **params):
    """Return the (closed, by default) pull requests of *project*."""
    params.setdefault("state", "closed")
    url = "https://api.github.com/repos/{project}/pulls".format(project=project)
    headers = make_auth_header() if auth else None
    return get_paged_request(url, headers=headers, **params)
162
def get_issues_list(project, auth=False, **params):
    """Return the (closed, by default) issues of *project*."""
    params.setdefault("state", "closed")
    url = "https://api.github.com/repos/{project}/issues".format(project=project)
    headers = make_auth_header() if auth else None
    return get_paged_request(url, headers=headers, **params)
173 129
def get_milestones(project, auth=False, **params):
    """Return all milestones of *project* (any state by default)."""
    params.setdefault('state', 'all')
    url = "https://api.github.com/repos/{project}/milestones".format(project=project)
    headers = make_auth_header() if auth else None
    return get_paged_request(url, headers=headers, **params)
183 139
def get_milestone_id(project, milestone, auth=False, **params):
    """Return the numeric id of the milestone titled *milestone*.

    Raises ValueError when no milestone with that title exists.
    """
    for candidate in get_milestones(project, auth=auth, **params):
        if candidate['title'] == milestone:
            return candidate['number']
    raise ValueError("milestone %s not found" % milestone)
191 147
def is_pull_request(issue):
    """Return True if the given issue is a pull request."""
    # Issues representing PRs carry a 'pull_request' sub-object with an html_url.
    pr_info = issue.get('pull_request', {})
    return bool(pr_info.get('html_url', None))
195 151
def get_authors(pr):
    """Return 'Name <email>' strings for every commit author on pull request *pr*."""
    print("getting authors for #%i" % pr['number'], file=sys.stderr)
    response = requests.get(pr['commits_url'], headers=make_auth_header())
    response.raise_for_status()
    return [
        "%s <%s>" % (commit['commit']['author']['name'],
                     commit['commit']['author']['email'])
        for commit in response.json()
    ]
207 163
208 # encode_multipart_formdata is from urllib3.filepost
209 # The only change is to iter_fields, to enforce S3's required key ordering
210
def iter_fields(fields):
    """Yield (key, value) pairs with S3's required POST fields first, in order.

    The input mapping is not mutated; any remaining fields follow after the
    mandatory ones. Raises KeyError if a mandatory field is missing.
    """
    remaining = fields.copy()
    # S3 rejects multipart uploads unless these keys appear in exactly this order.
    for key in ('key', 'acl', 'Filename', 'success_action_status', 'AWSAccessKeyId',
                'Policy', 'Signature', 'Content-Type', 'file'):
        yield (key, remaining.pop(key))
    yield from remaining.items()
218
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data mime format.

    :param fields:
        Dictionary of fields or list of (key, value) field tuples. The key is
        treated as the field name, and the value as the body of the form-data
        bytes. If the value is a tuple of two elements, then the first element
        is treated as the filename of the form-data section.

        Field names and filenames must be unicode.

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    # copy requests imports in here:
    from io import BytesIO
    from requests.packages.urllib3.filepost import (
        choose_boundary, six, writer, b, get_content_type
    )
    # NOTE(review): modern requests/urllib3 no longer expose `six`, `writer`
    # and `b` from filepost — confirm the pinned requests version before
    # relying on these imports.
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    # Each field becomes one part: boundary line, headers, blank line, data.
    for fieldname, value in iter_fields(fields):
        body.write(b('--%s\r\n' % (boundary)))

        if isinstance(value, tuple):
            # (filename, data) pair: emit a file part with a guessed MIME type.
            filename, data = value
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b('Content-Type: %s\r\n\r\n' %
                         (get_content_type(filename))))
        else:
            # Plain value: emit a text/plain part.
            data = value
            writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
                               % (fieldname))
            body.write(b'Content-Type: text/plain\r\n\r\n')

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility
        if isinstance(data, six.text_type):
            # Text goes through the codec writer; raw bytes are written as-is.
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b'\r\n')

    # Closing boundary marker terminates the multipart body.
    body.write(b('--%s--\r\n' % (boundary)))

    content_type = b('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type
273
274
def post_download(project, filename, name=None, description=""):
    """Upload a file to the GitHub downloads area"""
    if name is None:
        name = Path(filename).name
    with open(filename, 'rb') as f:
        filedata = f.read()

    # Step 1: register the download with GitHub to obtain S3 upload credentials.
    url = "https://api.github.com/repos/{project}/downloads".format(project=project)
    payload = json.dumps(dict(name=name, size=len(filedata),
                              description=description))
    response = requests.post(url, data=payload, headers=make_auth_header())
    response.raise_for_status()
    reply = json.loads(response.content)
    s3_url = reply['s3_url']

    # Step 2: POST the file itself to S3. Field order matters here and is
    # enforced by iter_fields() inside encode_multipart_formdata().
    fields = dict(
        key=reply['path'],
        acl=reply['acl'],
        success_action_status=201,
        Filename=reply['name'],
        AWSAccessKeyId=reply['accesskeyid'],
        Policy=reply['policy'],
        Signature=reply['signature'],
        file=(reply['name'], filedata),
    )
    fields['Content-Type'] = reply['mime_type']
    data, content_type = encode_multipart_formdata(fields)
    return requests.post(s3_url, data=data, headers={'Content-Type': content_type})
@@ -1,231 +1,230 b''
1 1 #!/usr/bin/env python
2 2 """Simple tools to query github.com and gather stats about issues.
3 3
4 4 To generate a report for IPython 2.0, run:
5 5
6 6 python github_stats.py --milestone 2.0 --since-tag rel-1.0.0
7 7 """
8 8 #-----------------------------------------------------------------------------
9 9 # Imports
10 10 #-----------------------------------------------------------------------------
11 11
12 12
13 import codecs
14 13 import sys
15 14
16 15 from argparse import ArgumentParser
17 16 from datetime import datetime, timedelta
18 17 from subprocess import check_output
19 18
20 19 from gh_api import (
21 20 get_paged_request, make_auth_header, get_pull_request, is_pull_request,
22 21 get_milestone_id, get_issues_list, get_authors,
23 22 )
24 23 #-----------------------------------------------------------------------------
25 24 # Globals
26 25 #-----------------------------------------------------------------------------
27 26
28 27 ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
29 28 PER_PAGE = 100
30 29
31 30 #-----------------------------------------------------------------------------
32 31 # Functions
33 32 #-----------------------------------------------------------------------------
34 33
def round_hour(dt):
    """Truncate *dt* down to the start of its hour."""
    return dt.replace(minute=0, second=0, microsecond=0)
37 36
def _parse_datetime(s):
    """Parse dates in the format returned by the Github API."""
    if not s:
        # Missing timestamps (e.g. merged_at on unmerged PRs) sort before
        # everything by mapping to the epoch.
        return datetime.fromtimestamp(0)
    return datetime.strptime(s, ISO8601)
44 43
def issues2dict(issues):
    """Convert a list of issues to a dict, keyed by issue number."""
    return {issue['number']: issue for issue in issues}
51 50
def split_pulls(all_issues, project="ipython/ipython"):
    """split a list of closed issues into non-PR Issues and Pull Requests"""
    pulls = []
    issues = []
    for item in all_issues:
        if is_pull_request(item):
            # Re-fetch the PR itself: the issue payload lacks merge metadata.
            pulls.append(get_pull_request(project, item['number'], auth=True))
        else:
            issues.append(item)
    return issues, pulls
63 62
64 63
def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
    """Get all issues closed since a particular point in time.

    *period* can either be a datetime object, or a timedelta object. In the
    latter case, it is used as a time before the present.
    """
    which = 'pulls' if pulls else 'issues'

    if isinstance(period, timedelta):
        since = round_hour(datetime.utcnow() - period)
    else:
        since = period
    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
    allclosed = get_paged_request(url, headers=make_auth_header())

    # The API's `since` filters on update time; re-filter on actual close time.
    filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > since]
    if pulls:
        # Keep only PRs merged in the window, and only those against master
        # (PRs against other branches are backports).
        filtered = [i for i in filtered if _parse_datetime(i['merged_at']) > since]
        filtered = [i for i in filtered if i['base']['ref'] == 'master']
    else:
        # The issues endpoint also returns PRs; drop them here.
        filtered = [i for i in filtered if not is_pull_request(i)]

    return filtered
89 88
90 89
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return *issues* sorted by the given field (closing date by default)."""
    return sorted(issues, key=lambda issue: issue[field], reverse=reverse)
94 93
95 94
def report(issues, show_urls=False):
    """Summary report about a list of issues, printing number and title."""
    for i in issues:
        # Double backticks so titles render as literals in reST output.
        title = i['title'].replace(u'`', u'``')
        if show_urls:
            # Merged PRs get the :ghpull: Sphinx role, plain issues :ghissue:.
            role = 'ghpull' if 'merged_at' in i else 'ghissue'
            print(u'* :%s:`%d`: %s' % (role, i['number'], title))
        else:
            print(u'* %d: %s' % (i['number'], title))
106 105
107 106 #-----------------------------------------------------------------------------
108 107 # Main script
109 108 #-----------------------------------------------------------------------------
110 109
if __name__ == "__main__":

    # Fixes two defects in the deprecation notice: it named the wrong script
    # ("backport_pr.py" copy-pasted into github_stats.py), and the implicit
    # string concatenation was missing a space ("recommendedto install").
    # Printed to stderr so piped report output stays clean.
    print("DEPRECATE: github_stats.py is deprecated and it is now recommended "
          "to install `ghpro` from PyPI.", file=sys.stderr)
115 114
116 115
    # Whether to add reST urls for all issues in printout.
    show_urls = True

    parser = ArgumentParser()
    parser.add_argument('--since-tag', type=str,
        help="The git tag to use for the starting point (typically the last major release)."
    )
    parser.add_argument('--milestone', type=str,
        help="The GitHub milestone to use for filtering issues [optional]."
    )
    parser.add_argument('--days', type=int,
        help="The number of days of data to summarize (use this or --since-tag)."
    )
    parser.add_argument('--project', type=str, default="ipython/ipython",
        help="The project to summarize."
    )
    parser.add_argument('--links', action='store_true', default=False,
        help="Include links to all closed Issues and PRs in the output."
    )

    opts = parser.parse_args()
    tag = opts.since_tag

    # set `since` from days or git tag
    if opts.days:
        since = datetime.utcnow() - timedelta(days=opts.days)
    else:
        if not tag:
            # Default to the most recent tag reachable from HEAD.
            tag = check_output(['git', 'describe', '--abbrev=0']).strip().decode('utf8')
        cmd = ['git', 'log', '-1', '--format=%ai', tag]
        # %ai gives e.g. "2014-01-01 12:00:00 +0100"; split off the tz offset.
        tagday, tz = check_output(cmd).strip().decode('utf8').rsplit(' ', 1)
        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
        h = int(tz[1:3])
        m = int(tz[3:])
        td = timedelta(hours=h, minutes=m)
        # Normalize the tag's local timestamp to UTC by undoing the offset.
        if tz[0] == '-':
            since += td
        else:
            since -= td

    since = round_hour(since)

    milestone = opts.milestone
    project = opts.project

    print("fetching GitHub stats since %s (tag: %s, milestone: %s)" % (since, tag, milestone), file=sys.stderr)
    if milestone:
        # Milestone mode: fetch everything in the milestone, then separate
        # issues from pull requests.
        milestone_id = get_milestone_id(project=project, milestone=milestone,
                auth=True)
        issues_and_pulls = get_issues_list(project=project,
                milestone=milestone_id,
                state='closed',
                auth=True,
                )
        issues, pulls = split_pulls(issues_and_pulls, project=project)
    else:
        # Date mode: two separate queries, one per kind.
        issues = issues_closed_since(since, project=project, pulls=False)
        pulls = issues_closed_since(since, project=project, pulls=True)

    # For regular reports, it's nice to show them in reverse chronological order
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)

    n_issues, n_pulls = map(len, (issues, pulls))
    n_total = n_issues + n_pulls

    # Print summary report we can directly include into release notes.

    print()
    since_day = since.strftime("%Y/%m/%d")
    today = datetime.today().strftime("%Y/%m/%d")
    print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
    print()
    print("These lists are automatically generated, and may be incomplete or contain duplicates.")
    print()

    ncommits = 0
    all_authors = []
    if tag:
        # print git info, in addition to GitHub info:
        since_tag = tag+'..'
        cmd = ['git', 'log', '--oneline', since_tag]
        ncommits += len(check_output(cmd).splitlines())

        author_cmd = ['git', 'log', '--use-mailmap', "--format=* %aN", since_tag]
        all_authors.extend(check_output(author_cmd).decode('utf-8', 'replace').splitlines())

    pr_authors = []
    for pr in pulls:
        pr_authors.extend(get_authors(pr))
    # Add per-PR commit authors, but subtract one per PR so the merge
    # commits already counted above are not double-counted.
    ncommits = len(pr_authors) + ncommits - len(pulls)
    author_cmd = ['git', 'check-mailmap'] + pr_authors
    with_email = check_output(author_cmd).decode('utf-8', 'replace').splitlines()
    all_authors.extend([ u'* ' + a.split(' <')[0] for a in with_email ])
    # Case-insensitive de-duplication of author names.
    unique_authors = sorted(set(all_authors), key=lambda s: s.lower())

    print("We closed %d issues and merged %d pull requests." % (n_issues, n_pulls))
    if milestone:
        print("The full list can be seen `on GitHub <https://github.com/{project}/issues?q=milestone%3A{milestone}>`__".format(project=project,milestone=milestone)
        )

    print()
    print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
    print()
    print('\n'.join(unique_authors))

    if opts.links:
        print()
        print("GitHub issues and pull requests:")
        print()
        print('Pull Requests (%d):\n' % n_pulls)
        report(pulls, show_urls)
        print()
        print('Issues (%d):\n' % n_issues)
        report(issues, show_urls)
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,92 +1,85 b''
#!/usr/bin/env python3
"""IPython release script.

This should ONLY be run at real release time.
"""
from __future__ import print_function

import os
from glob import glob
from subprocess import call
import sys

from toollib import (get_ipdir, pjoin, cd, execfile, sh, archive,
                     archive_user, archive_dir)
from gh_api import post_download

# Get main ipython dir, this will raise if it doesn't pass some checks
ipdir = get_ipdir()
tooldir = pjoin(ipdir, 'tools')
distdir = pjoin(ipdir, 'dist')

# Where I keep static backups of each release
ipbackupdir = os.path.expanduser('~/ipython/backup')
if not os.path.exists(ipbackupdir):
    os.makedirs(ipbackupdir)

# Start in main IPython dir
cd(ipdir)

# Load release info
version = None
# execfile defines `version` (among others) from release.py in our globals.
execfile(pjoin('IPython','core','release.py'), globals())

# Build site addresses for file uploads
release_site = '%s/release/%s' % (archive, version)
backup_site = '%s/backup/' % archive

# Start actual release process
print()
print('Releasing IPython')
print('=================')
print()
print('Version:', version)
print()
print('Source IPython directory:', ipdir)
print()

# Perform local backup, go to tools dir to run it.
cd(tooldir)

if 'upload' in sys.argv:
    cd(distdir)

    # do not upload OS specific files like .DS_Store
    to_upload = glob('*.whl')+glob('*.tar.gz')
    for fname in to_upload:
        # TODO: update to GitHub releases API
        # NOTE(review): the `continue` below makes the rest of this loop body
        # unreachable — nothing is actually uploaded to GitHub here.
        continue
        print('uploading %s to GitHub' % fname)
        desc = "IPython %s source distribution" % version
        post_download("ipython/ipython", fname, description=desc)

    # Make target dir if it doesn't exist
    print('1. Uploading IPython to archive.ipython.org')
    sh('ssh %s "mkdir -p %s/release/%s" ' % (archive_user, archive_dir, version))
    sh('scp *.tar.gz *.tar.xz *.whl %s' % release_site)

    print('2. Uploading backup files...')
    cd(ipbackupdir)
    # Pick the most recent backup tarball (ls -1tr sorts oldest-first).
    sh('scp `ls -1tr *tgz | tail -1` %s' % backup_site)

    print('3. Uploading to PyPI using twine')
    cd(distdir)
    call(['twine', 'upload', '--verbose'] + to_upload)

else:
    # Build, but don't upload

    # Make backup tarball
    sh('./make_tarball.py')
    sh('mv ipython-*.tgz %s' % ipbackupdir)

    # Build release files
    sh('./build_release')

    cd(ipdir)

    print("`./release upload` to upload source distribution on PyPI and ipython archive")
    sys.exit(0)
90 83
91 84
92 85
1 NO CONTENT: modified file chmod 100755 => 100644
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now