Show More
@@ -1,149 +1,154 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | """Simple tools to query github.com and gather stats about issues. |
|
2 | """Simple tools to query github.com and gather stats about issues. | |
3 | """ |
|
3 | """ | |
4 | #----------------------------------------------------------------------------- |
|
4 | #----------------------------------------------------------------------------- | |
5 | # Imports |
|
5 | # Imports | |
6 | #----------------------------------------------------------------------------- |
|
6 | #----------------------------------------------------------------------------- | |
7 |
|
7 | |||
8 | from __future__ import print_function |
|
8 | from __future__ import print_function | |
9 |
|
9 | |||
10 | import json |
|
10 | import json | |
11 | import re |
|
11 | import re | |
12 | import sys |
|
12 | import sys | |
13 |
|
13 | |||
14 | from datetime import datetime, timedelta |
|
14 | from datetime import datetime, timedelta | |
15 | from urllib import urlopen |
|
15 | from urllib import urlopen | |
16 |
|
16 | |||
17 | #----------------------------------------------------------------------------- |
|
17 | #----------------------------------------------------------------------------- | |
18 | # Globals |
|
18 | # Globals | |
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 |
|
20 | |||
21 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" |
|
21 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" | |
22 | PER_PAGE = 100 |
|
22 | PER_PAGE = 100 | |
23 |
|
23 | |||
24 | element_pat = re.compile(r'<(.+?)>') |
|
24 | element_pat = re.compile(r'<(.+?)>') | |
25 | rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') |
|
25 | rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') | |
26 |
|
26 | |||
27 | #----------------------------------------------------------------------------- |
|
27 | #----------------------------------------------------------------------------- | |
28 | # Functions |
|
28 | # Functions | |
29 | #----------------------------------------------------------------------------- |
|
29 | #----------------------------------------------------------------------------- | |
30 |
|
30 | |||
def parse_link_header(headers):
    """Parse an HTTP ``Link`` header into a dict mapping rel name -> URL.

    Uses the module-level ``element_pat``/``rel_pat`` regexes; relies on
    the API emitting one ``rel`` per ``<url>`` element, in order.
    """
    link_s = headers.get('link', '')
    urls = element_pat.findall(link_s)
    rels = rel_pat.findall(link_s)
    # pair each rel with its URL positionally
    return dict(zip(rels, urls))
39 |
|
39 | |||
def get_paged_request(url):
    """get a full list, handling APIv3's paging

    Follows the ``rel="next"`` links from the ``Link`` response header
    until no further page is advertised, accumulating the JSON payloads.
    """
    results = []
    next_url = url
    while next_url:
        print("fetching %s" % next_url, file=sys.stderr)
        f = urlopen(next_url)
        results.extend(json.load(f))
        # advance to the next page, or stop when there is none
        next_url = parse_link_header(f.headers).get('next')
    return results
50 |
|
50 | |||
def get_issues(project="ipython/ipython", state="closed", pulls=False):
    """Get a list of the issues from the Github API."""
    # pull requests and issues live under different API endpoints
    if pulls:
        which = 'pulls'
    else:
        which = 'issues'
    url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (
        project, which, state, PER_PAGE)
    return get_paged_request(url)
56 |
|
56 | |||
57 |
|
57 | |||
def _parse_datetime(s):
    """Parse dates in the format returned by the Github API.

    Empty/None timestamps map to the epoch so comparisons still work.
    """
    if not s:
        return datetime.fromtimestamp(0)
    return datetime.strptime(s, ISO8601)
64 |
|
64 | |||
65 |
|
65 | |||
def issues2dict(issues):
    """Convert a list of issues to a dict, keyed by issue number."""
    return dict((issue['number'], issue) for issue in issues)
72 |
|
72 | |||
73 |
|
73 | |||
def is_pull_request(issue):
    """Return True if the given issue is a pull request."""
    # the API adds a 'pull_request_url' field to PRs only
    return any(key == 'pull_request_url' for key in issue)
77 |
|
77 | |||
78 |
|
78 | |||
def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
    """Get all issues closed since a particular point in time. period
    can either be a datetime object, or a timedelta object. In the
    latter case, it is used as a time before the present."""

    which = 'pulls' if pulls else 'issues'

    # normalize period to an absolute cutoff datetime
    if isinstance(period, timedelta):
        cutoff = datetime.now() - period
    else:
        cutoff = period
    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (
        project, which, cutoff.strftime(ISO8601), PER_PAGE)
    allclosed = get_paged_request(url)
    # the 'since' query filters on update time, so re-filter on close time
    filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > cutoff]

    # exclude rejected PRs: closed but never merged
    if pulls:
        filtered = [pr for pr in filtered if pr['merged_at']]

    return filtered
93 |
|
98 | |||
94 |
|
99 | |||
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return a list of issues sorted by the given field.

    Parameters
    ----------
    issues : list of dict
        Issue dicts as returned by the Github API.
    field : str
        Dict key to sort on (default: ``'closed_at'``).
    reverse : bool
        If True, sort in descending order.
    """
    # fixed docstring: it claimed "sorted by closing date date" but the
    # sort key is whatever `field` names, not necessarily the close date
    return sorted(issues, key=lambda i: i[field], reverse=reverse)
98 |
|
103 | |||
99 |
|
104 | |||
def report(issues, show_urls=False):
    """Summary report about a list of issues, printing number and title.
    """
    # titles may have unicode in them, so we must encode everything below
    for issue in issues:
        number = issue['number']
        title = issue['title'].encode('utf-8')
        if show_urls:
            # merged PRs carry a 'merged_at' key; plain issues do not
            role = 'ghpull' if 'merged_at' in issue else 'ghissue'
            print('* :%s:`%d`: %s' % (role, number, title))
        else:
            print('* %d: %s' % (number, title))
112 |
|
117 | |||
113 | #----------------------------------------------------------------------------- |
|
118 | #----------------------------------------------------------------------------- | |
114 | # Main script |
|
119 | # Main script | |
115 | #----------------------------------------------------------------------------- |
|
120 | #----------------------------------------------------------------------------- | |
116 |
|
121 | |||
if __name__ == "__main__":
    # Whether to add reST urls for all issues in printout.
    show_urls = True

    # By default, search one month back; first CLI argument overrides it.
    days = int(sys.argv[1]) if len(sys.argv) > 1 else 30

    # turn off to play interactively without redownloading, use %run -i
    if 1:
        issues = issues_closed_since(timedelta(days=days), pulls=False)
        pulls = issues_closed_since(timedelta(days=days), pulls=True)

    # For regular reports, it's nice to show them in reverse chronological order
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)

    n_issues = len(issues)
    n_pulls = len(pulls)
    n_total = n_issues + n_pulls

    # Print summary report we can directly include into release notes.
    print("GitHub stats for the last %d days." % days)
    print("We closed a total of %d issues, %d pull requests and %d regular \n"
          "issues; this is the full list (generated with the script \n"
          "`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
    print()
    print('Pull Requests (%d):\n' % n_pulls)
    report(pulls, show_urls)
    print()
    print('Issues (%d):\n' % n_issues)
    report(issues, show_urls)
General Comments 0
You need to be logged in to leave comments.
Login now