Show More
@@ -1,154 +1,189 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Simple tools to query github.com and gather stats about issues. |
|
3 | 3 | """ |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Imports |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | |
|
8 | 8 | from __future__ import print_function |
|
9 | 9 | |
|
10 | 10 | import json |
|
11 | 11 | import re |
|
12 | 12 | import sys |
|
13 | 13 | |
|
14 | 14 | from datetime import datetime, timedelta |
|
15 | from subprocess import check_output | |
|
15 | 16 | from urllib import urlopen |
|
16 | 17 | |
|
17 | 18 | #----------------------------------------------------------------------------- |
|
18 | 19 | # Globals |
|
19 | 20 | #----------------------------------------------------------------------------- |
|
20 | 21 | |
|
21 | 22 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" |
|
22 | 23 | PER_PAGE = 100 |
|
23 | 24 | |
|
24 | 25 | element_pat = re.compile(r'<(.+?)>') |
|
25 | 26 | rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') |
|
26 | 27 | |
|
27 | 28 | #----------------------------------------------------------------------------- |
|
28 | 29 | # Functions |
|
29 | 30 | #----------------------------------------------------------------------------- |
|
30 | 31 | |
|
def parse_link_header(headers):
    """Parse an HTTP ``Link`` header into a ``{rel: url}`` dict.

    GitHub's APIv3 advertises pagination as ``<url>; rel="next"`` pairs;
    the rel names and URLs appear in the same order, so they are paired
    positionally.
    """
    link_value = headers.get('link', '')
    rels_found = rel_pat.findall(link_value)
    urls_found = element_pat.findall(link_value)
    return dict(zip(rels_found, urls_found))
|
39 | 40 | |
|
def get_paged_request(url):
    """Fetch every page of a paginated GitHub APIv3 request.

    Follows the ``rel="next"`` links from each response's ``Link`` header
    until no further page is offered, accumulating all JSON payloads into
    a single list.  Progress is logged to stderr.
    """
    collected = []
    next_url = url
    while next_url:
        print("fetching %s" % next_url, file=sys.stderr)
        response = urlopen(next_url)
        collected.extend(json.load(response))
        next_url = parse_link_header(response.headers).get('next')
    return collected
|
50 | 51 | |
|
def get_issues(project="ipython/ipython", state="closed", pulls=False):
    """Get a list of the issues from the Github API."""
    if pulls:
        which = 'pulls'
    else:
        which = 'issues'
    url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (
        project, which, state, PER_PAGE)
    return get_paged_request(url)
|
56 | 57 | |
|
57 | 58 | |
|
def _parse_datetime(s):
    """Parse a Github API timestamp; falsy values map to the epoch."""
    if not s:
        return datetime.fromtimestamp(0)
    return datetime.strptime(s, ISO8601)
|
64 | 65 | |
|
65 | 66 | |
|
def issues2dict(issues):
    """Index a list of issue dicts by their issue number."""
    return dict((issue['number'], issue) for issue in issues)
|
72 | 73 | |
|
73 | 74 | |
|
def is_pull_request(issue):
    """Return True if the given issue dict describes a pull request."""
    has_pr_url = 'pull_request_url' in issue
    return has_pr_url
|
77 | 78 | |
|
78 | 79 | |
|
def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
    """Get all issues closed since a particular point in time.

    *period* may be a datetime (an absolute point in time) or a timedelta
    (a span measured back from the present).  When *pulls* is True, only
    merged pull requests are returned; rejected ones are excluded.
    """
    if pulls:
        which = 'pulls'
    else:
        which = 'issues'

    # Normalize *period* to an absolute cutoff datetime.
    if isinstance(period, timedelta):
        cutoff = datetime.now() - period
    else:
        cutoff = period
    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, cutoff.strftime(ISO8601), PER_PAGE)
    # ``since`` filters on update time, so re-filter on the actual close date.
    closed = [entry for entry in get_paged_request(url)
              if _parse_datetime(entry['closed_at']) > cutoff]

    # exclude rejected (never merged) PRs
    if pulls:
        closed = [entry for entry in closed if entry['merged_at']]

    return closed
|
98 | 99 | |
|
99 | 100 | |
|
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return the issues sorted by the value of *field* (closing date by default)."""
    def key_of(issue):
        return issue[field]
    return sorted(issues, key=key_of, reverse=reverse)
|
103 | 104 | |
|
104 | 105 | |
|
def report(issues, show_urls=False):
    """Print a one-line summary (number and title) for each issue.

    With *show_urls*, lines carry Sphinx :ghpull:/:ghissue: roles so the
    output can be pasted directly into release notes.
    """
    # titles may have unicode in them, so we must encode everything below
    for issue in issues:
        encoded_title = issue['title'].encode('utf-8')
        if show_urls:
            if 'merged_at' in issue:
                role = 'ghpull'
            else:
                role = 'ghissue'
            print('* :%s:`%d`: %s' % (role, issue['number'], encoded_title))
        else:
            print('* %d: %s' % (issue['number'], encoded_title))
|
117 | 118 | |
|
118 | 119 | #----------------------------------------------------------------------------- |
|
119 | 120 | # Main script |
|
120 | 121 | #----------------------------------------------------------------------------- |
|
121 | 122 | |
|
if __name__ == "__main__":
    # Whether to add reST urls for all issues in printout.
    show_urls = True

    # The single optional CLI argument is either an integer number of days
    # to look back, or a git tag name to measure from.  With no argument,
    # use the most recent tag in the repository.
    tag = None
    if len(sys.argv) > 1:
        try:
            days = int(sys.argv[1])
        except ValueError:
            # FIX: was a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt and masked unrelated errors.
            # A non-integer argument is treated as a git tag name.
            tag = sys.argv[1]
    else:
        tag = check_output(['git', 'describe', '--abbrev=0']).strip()

    if tag:
        # Start from the tagged commit's author date; ``%ai`` ends with a
        # timezone field, which we split off and ignore.
        cmd = ['git', 'log', '-1', '--format=%ai', tag]
        tagday, tz = check_output(cmd).strip().rsplit(' ', 1)
        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
    else:
        since = datetime.now() - timedelta(days=days)

    print("fetching GitHub stats since %s (tag: %s)" % (since, tag), file=sys.stderr)
    # turn off to play interactively without redownloading, use %run -i
    if 1:
        issues = issues_closed_since(since, pulls=False)
        pulls = issues_closed_since(since, pulls=True)

    # For regular reports, it's nice to show them in reverse chronological order
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)

    n_issues, n_pulls = map(len, (issues, pulls))
    n_total = n_issues + n_pulls

    # Print summary report we can directly include into release notes.
    print()
    since_day = since.strftime("%Y/%m/%d")
    today = datetime.today().strftime("%Y/%m/%d")
    print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
    print()
    print("These lists are automatically generated, and may be incomplete or contain duplicates.")
    print()
    if tag:
        # print git info, in addition to GitHub info:
        since_tag = tag+'..'
        cmd = ['git', 'log', '--oneline', since_tag]
        ncommits = len(check_output(cmd).splitlines())

        author_cmd = ['git', 'log', '--format=* %aN', since_tag]
        all_authors = check_output(author_cmd).splitlines()
        unique_authors = sorted(set(all_authors))

        print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
        print()
        print('\n'.join(unique_authors))
        print()

    print()
    print("We closed a total of %d issues, %d pull requests and %d regular issues;\n"
          "this is the full list (generated with the script \n"
          ":file:`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
    print()
    print('Pull Requests (%d):\n' % n_pulls)
    report(pulls, show_urls)
    print()
    print('Issues (%d):\n' % n_issues)
    report(issues, show_urls)
General Comments 0
You need to be logged in to leave comments.
Login now