@@ -8,25 +8,59 @@
 from __future__ import print_function
 
 import json
+import re
 import sys
 
 from datetime import datetime, timedelta
 from urllib import urlopen
 
 #-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
+PER_PAGE = 100
+
+element_pat = re.compile(r'<(.+?)>')
+rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
+
+#-----------------------------------------------------------------------------
 # Functions
 #-----------------------------------------------------------------------------
 
-def get_issues(project="ipython/ipython/", state="open"):
+def parse_link_header(headers):
+    link_s = headers.get('link', '')
+    urls = element_pat.findall(link_s)
+    rels = rel_pat.findall(link_s)
+    d = {}
+    for rel,url in zip(rels, urls):
+        d[rel] = url
+    return d
+
+def get_paged_request(url):
+    """get a full list, handling APIv3's paging"""
+    results = []
+    while url:
+        print("fetching %s" % url, file=sys.stderr)
+        f = urlopen(url)
+        results.extend(json.load(f))
+        links = parse_link_header(f.headers)
+        url = links.get('next')
+    return results
+
+def get_issues(project="ipython/ipython", state="closed", pulls=False):
     """Get a list of the issues from the Github API."""
-    f = urlopen("http://github.com/api/v2/json/issues/list/%s%s" % (project,
-                                                                    state))
-    return json.load(f)['issues']
+    which = 'pulls' if pulls else 'issues'
+    url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (project, which, state, PER_PAGE)
+    return get_paged_request(url)
 
 
 def _parse_datetime(s):
     """Parse dates in the format returned by the Github API."""
-    return datetime.strptime(s.rpartition(" ")[0], "%Y/%m/%d %H:%M:%S")
+    if s:
+        return datetime.strptime(s, ISO8601)
+    else:
+        return datetime.fromtimestamp(0)
 
 
 def issues2dict(issues):
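Note: the hunk above swaps the single API v2 call for Link-header pagination. A condensed sketch of how parse_link_header behaves, using a made-up Link value in the `<url>; rel="next"` format that API v3 documents (URLs and page numbers are illustrative only):

    import re

    element_pat = re.compile(r'<(.+?)>')
    rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')

    def parse_link_header(headers):
        # split '<url>; rel="next", <url>; rel="last"' into {rel: url}
        link_s = headers.get('link', '')
        urls = element_pat.findall(link_s)
        rels = rel_pat.findall(link_s)
        return dict(zip(rels, urls))

    # hypothetical headers mapping; the real code passes f.headers from urlopen()
    headers = {'link': '<https://api.github.com/repos/ipython/ipython/issues?state=closed&page=2>; rel="next", '
                       '<https://api.github.com/repos/ipython/ipython/issues?state=closed&page=6>; rel="last"'}
    links = parse_link_header(headers)
    print(links['next'])  # ...&page=2 -- get_paged_request() keeps following this
    print(links['last'])  # ...&page=6

get_paged_request() simply keeps fetching links['next'] until the header no longer advertises one.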
@@ -42,14 +76,20 @@ def is_pull_request(issue):
     return 'pull_request_url' in issue
 
 
-def issues_closed_since(period=timedelta(days=365), project="ipython/ipython"):
+def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
     """Get all issues closed since a particular point in time. period
     can either be a datetime object, or a timedelta object. In the
     latter case, it is used as a time before the present."""
-    allclosed = get_issues(project=project, state='closed')
+
+    which = 'pulls' if pulls else 'issues'
+
     if isinstance(period, timedelta):
         period = datetime.now() - period
-    return [i for i in allclosed if _parse_datetime(i['closed_at']) > period]
+    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, period.strftime(ISO8601), PER_PAGE)
+    allclosed = get_paged_request(url)
+    # allclosed = get_issues(project=project, state='closed', pulls=pulls, since=period)
+    filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > period]
+    return filtered
 
 
 def sorted_by_field(issues, field='closed_at', reverse=False):
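Note: API v3 exchanges timestamps in ISO 8601, which is what the new ISO8601 format string and the reworked _parse_datetime handle. A small round-trip sketch with a made-up closed_at value:

    from datetime import datetime, timedelta

    ISO8601 = "%Y-%m-%dT%H:%M:%SZ"

    # parsing a made-up 'closed_at' value returned by the API
    closed_at = "2012-01-15T20:30:00Z"
    print(datetime.strptime(closed_at, ISO8601))  # 2012-01-15 20:30:00

    # building the since= query parameter goes the other way
    month_ago = datetime.now() - timedelta(days=30)
    print(month_ago.strftime(ISO8601))            # e.g. 2012-02-14T09:00:00Z

The same strftime call is what builds the since= parameter in issues_closed_since() above.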
@@ -63,8 +103,8 @@ def report(issues, show_urls=False):
     # titles may have unicode in them, so we must encode everything below
     if show_urls:
         for i in issues:
-            print('* `%d <%s>`_: %s' % (i['number'],
-                                        i['html_url'].encode('utf-8'),
+            role = 'ghpull' if 'merged' in i else 'ghissue'
+            print('* :%s:`%d`: %s' % (role, i['number'],
                                         i['title'].encode('utf-8')))
     else:
         for i in issues:
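Note: report() now emits Sphinx roles instead of explicit reST links, choosing the role by whether the item carries a 'merged' field. A before/after sketch (Python 2, like the rest of the script) with a made-up entry; ghissue/ghpull are assumed to be roles provided by the docs' GitHub Sphinx extension:

    # made-up item; the 'merged' key marks it as a pull request
    i = {'number': 1234, 'title': u'Fix something', 'merged': True}

    role = 'ghpull' if 'merged' in i else 'ghissue'
    print('* :%s:`%d`: %s' % (role, i['number'], i['title'].encode('utf-8')))
    # old style (URL invented for illustration):
    #   * `1234 <https://github.com/ipython/ipython/pull/1234>`_: Fix something
    # new style, resolved by Sphinx at doc build time:
    #   * :ghpull:`1234`: Fix something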
@@ -86,24 +126,24 @@ if __name__ == "__main__":
 
     # turn off to play interactively without redownloading, use %run -i
     if 1:
-        issues = issues_closed_since(timedelta(days=days))
+        issues = issues_closed_since(timedelta(days=days), pulls=False)
+        pulls = issues_closed_since(timedelta(days=days), pulls=True)
 
     # For regular reports, it's nice to show them in reverse chronological order
     issues = sorted_by_field(issues, reverse=True)
+    pulls = sorted_by_field(pulls, reverse=True)
 
-    # Break up into pull requests and regular issues
-    pulls = filter(is_pull_request, issues)
-    regular = filter(lambda i: not is_pull_request(i), issues)
-    n_issues, n_pulls, n_regular = map(len, (issues, pulls, regular))
+    n_issues, n_pulls = map(len, (issues, pulls))
+    n_total = n_issues + n_pulls
 
     # Print summary report we can directly include into release notes.
-    print("Github stats for the last %d days." % days)
+    print("GitHub stats for the last %d days." % days)
     print("We closed a total of %d issues, %d pull requests and %d regular \n"
           "issues; this is the full list (generated with the script \n"
-          "`tools/github_stats.py`):" % (n_issues, n_pulls, n_regular))
+          "`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
     print()
-    print('Pull requests (%d):\n' % n_pulls)
+    print('Pull Requests (%d):\n' % n_pulls)
     report(pulls, show_urls)
     print()
-    print('Regular issues (%d):\n' % n_regular)
-    report(regular, show_urls)
+    print('Issues (%d):\n' % n_issues)
+    report(issues, show_urls)
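Note: in the reworked summary, the first %d of the "We closed a total of ..." sentence is now the combined total, with pull requests and plain issues broken out after it. With made-up counts:

    n_issues, n_pulls = 90, 60    # illustrative counts only
    n_total = n_issues + n_pulls  # 150
    print("We closed a total of %d issues, %d pull requests and %d regular \n"
          "issues; this is the full list (generated with the script \n"
          "`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
    # We closed a total of 150 issues, 60 pull requests and 90 regular
    # issues; this is the full list (generated with the script
    # `tools/github_stats.py`):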