Show More
@@ -1,149 +1,154 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | """Simple tools to query github.com and gather stats about issues. |
|
3 | 3 | """ |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Imports |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | |
|
8 | 8 | from __future__ import print_function |
|
9 | 9 | |
|
10 | 10 | import json |
|
11 | 11 | import re |
|
12 | 12 | import sys |
|
13 | 13 | |
|
14 | 14 | from datetime import datetime, timedelta |
|
15 | 15 | from urllib import urlopen |
|
16 | 16 | |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | # Globals |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | |
|
21 | 21 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" |
|
22 | 22 | PER_PAGE = 100 |
|
23 | 23 | |
|
24 | 24 | element_pat = re.compile(r'<(.+?)>') |
|
25 | 25 | rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') |
|
26 | 26 | |
|
27 | 27 | #----------------------------------------------------------------------------- |
|
28 | 28 | # Functions |
|
29 | 29 | #----------------------------------------------------------------------------- |
|
30 | 30 | |
|
def parse_link_header(headers):
    """Parse an HTTP ``Link`` header into a dict mapping rel name -> URL.

    `headers` only needs a ``get`` method (e.g. the headers object
    returned by ``urlopen``); a missing header yields an empty dict.
    """
    link_value = headers.get('link', '')
    # The two module-level patterns pick out the <url> elements and their
    # rel="..." labels in the same left-to-right order, so zipping pairs
    # each rel with its URL.
    found_urls = element_pat.findall(link_value)
    found_rels = rel_pat.findall(link_value)
    return dict(zip(found_rels, found_urls))
|
39 | 39 | |
|
def get_paged_request(url):
    """Get a full list of results, following GitHub APIv3's paging.

    Fetches `url`, then keeps following the ``rel="next"`` URL from the
    ``Link`` response header until there are no more pages, returning the
    concatenated list of decoded JSON results.
    """
    results = []
    while url:
        # Progress goes to stderr so stdout stays clean for the report.
        print("fetching %s" % url, file=sys.stderr)
        f = urlopen(url)
        try:
            results.extend(json.load(f))
            links = parse_link_header(f.headers)
        finally:
            # urlopen handles are not context managers in Python 2;
            # close explicitly so we don't leak one socket per page.
            f.close()
        url = links.get('next')
    return results
|
50 | 50 | |
|
def get_issues(project="ipython/ipython", state="closed", pulls=False):
    """Get a list of the issues from the Github API.

    When `pulls` is True, query the pull-request endpoint instead of the
    issues endpoint. All pages of results are fetched.
    """
    endpoint = 'pulls' if pulls else 'issues'
    url = ("https://api.github.com/repos/%s/%s?state=%s&per_page=%i"
           % (project, endpoint, state, PER_PAGE))
    return get_paged_request(url)
|
56 | 56 | |
|
57 | 57 | |
|
def _parse_datetime(s):
    """Parse a timestamp in the Github API's ISO-8601 format.

    A falsy value (e.g. ``None`` for an issue that was never closed)
    maps to the epoch so comparisons still work.
    """
    if not s:
        return datetime.fromtimestamp(0)
    return datetime.strptime(s, ISO8601)
|
64 | 64 | |
|
65 | 65 | |
|
def issues2dict(issues):
    """Convert a list of issue dicts to a dict keyed by issue number."""
    return dict((issue['number'], issue) for issue in issues)
|
72 | 72 | |
|
73 | 73 | |
|
def is_pull_request(issue):
    """Return True if the given issue dict represents a pull request.

    Pull requests carry a 'pull_request_url' key that plain issues lack.
    """
    pr_marker = 'pull_request_url'
    return pr_marker in issue
|
77 | 77 | |
|
78 | 78 | |
|
def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
    """Get all issues closed since a particular point in time.

    `period` can either be a datetime object, or a timedelta object.
    In the latter case, it is used as a time before the present.
    When `pulls` is True, only merged pull requests are returned.
    """
    endpoint = 'pulls' if pulls else 'issues'

    # Normalize a relative period into an absolute cutoff datetime.
    if isinstance(period, timedelta):
        period = datetime.now() - period

    url = ("https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i"
           % (project, endpoint, period.strftime(ISO8601), PER_PAGE))
    allclosed = get_paged_request(url)

    # The `since` parameter filters on update time, so re-filter on the
    # actual close time here.
    recent = [issue for issue in allclosed
              if _parse_datetime(issue['closed_at']) > period]

    # exclude rejected PRs (closed without being merged)
    if pulls:
        recent = [pr for pr in recent if pr['merged_at']]

    return recent
|
93 | 98 | |
|
94 | 99 | |
|
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return a list of issues sorted by the value of `field`."""
    def field_value(issue):
        return issue[field]
    return sorted(issues, key=field_value, reverse=reverse)
|
98 | 103 | |
|
99 | 104 | |
|
100 | 105 | def report(issues, show_urls=False): |
|
101 | 106 | """Summary report about a list of issues, printing number and title. |
|
102 | 107 | """ |
|
103 | 108 | # titles may have unicode in them, so we must encode everything below |
|
104 | 109 | if show_urls: |
|
105 | 110 | for i in issues: |
|
106 | role = 'ghpull' if 'merged' in i else 'ghissue' | |
|
111 | role = 'ghpull' if 'merged_at' in i else 'ghissue' | |
|
107 | 112 | print('* :%s:`%d`: %s' % (role, i['number'], |
|
108 | 113 | i['title'].encode('utf-8'))) |
|
109 | 114 | else: |
|
110 | 115 | for i in issues: |
|
111 | 116 | print('* %d: %s' % (i['number'], i['title'].encode('utf-8'))) |
|
112 | 117 | |
|
113 | 118 | #----------------------------------------------------------------------------- |
|
114 | 119 | # Main script |
|
115 | 120 | #----------------------------------------------------------------------------- |
|
116 | 121 | |
|
if __name__ == "__main__":
    # Whether to add reST urls for all issues in printout.
    show_urls = True

    # Number of days to look back; one month by default, overridable
    # via the first command-line argument.
    days = int(sys.argv[1]) if len(sys.argv) > 1 else 30

    # turn off to play interactively without redownloading, use %run -i
    if 1:
        issues = issues_closed_since(timedelta(days=days), pulls=False)
        pulls = issues_closed_since(timedelta(days=days), pulls=True)

    # Reverse chronological order reads best in release notes.
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)

    n_issues = len(issues)
    n_pulls = len(pulls)
    n_total = n_issues + n_pulls

    # Print summary report we can directly include into release notes.
    print("GitHub stats for the last %d days." % days)
    print("We closed a total of %d issues, %d pull requests and %d regular \n"
          "issues; this is the full list (generated with the script \n"
          "`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
    print()
    print('Pull Requests (%d):\n' % n_pulls)
    report(pulls, show_urls)
    print()
    print('Issues (%d):\n' % n_issues)
    report(issues, show_urls)
General Comments 0
You need to be logged in to leave comments.
Login now