attempt to cache gh_api requests...
MinRK
--- a/gh_api.py
+++ b/gh_api.py
@@ -1,4 +1,4 @@
1 """Functions for Github authorisation."""
1 """Functions for Github API requests."""
2 2 from __future__ import print_function
3 3
4 4 try:
@@ -14,6 +14,13 @@ import requests
14 14 import getpass
15 15 import json
16 16
17 try:
18 import requests_cache
19 except ImportError:
20 print("no cache")
21 else:
22 requests_cache.install_cache("gh_api")
23
17 24 # Keyring stores passwords by a 'username', but we're not storing a username and
18 25 # password
19 26 fake_username = 'ipython_tools'
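Note on the hunk above: `requests_cache.install_cache("gh_api")` monkey-patches `requests` so that every response is transparently stored on disk (a `gh_api.sqlite` file by default) and repeat requests are served from the cache. That is what makes the per-PR re-fetching introduced later in this commit cheap on subsequent runs. A minimal standalone sketch of the pattern (the URL is just an example):

```python
# Minimal sketch of transparent caching with requests_cache, mirroring the
# try/except in this commit. Cache name "gh_api" is taken from the diff.
import requests

try:
    import requests_cache
except ImportError:
    print("no cache")  # fall back to plain, uncached requests
else:
    # Creates gh_api.sqlite; repeat GETs are served from it.
    requests_cache.install_cache("gh_api")

r = requests.get("https://api.github.com/repos/ipython/ipython")
print(r.json()["full_name"])              # network hit on the first run
r2 = requests.get("https://api.github.com/repos/ipython/ipython")
print(getattr(r2, "from_cache", False))   # True once the cache is warm
```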
--- a/github_stats.py
+++ b/github_stats.py
@@ -13,7 +13,7 @@ import sys
13 13
14 14 from datetime import datetime, timedelta
15 15 from subprocess import check_output
16 from urllib import urlopen
16 from gh_api import get_paged_request, make_auth_header, get_pull_request
17 17
18 18 #-----------------------------------------------------------------------------
19 19 # Globals
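The stats script now delegates HTTP to `gh_api` helpers instead of raw `urlopen`. `make_auth_header` itself is not shown in this diff; a hypothetical sketch of such a helper (the environment-variable token source is an assumption, not the actual gh_api code, which stores credentials via keyring):

```python
# Hypothetical sketch of an auth-header helper like gh_api.make_auth_header;
# the real implementation may obtain the token differently.
import os

def make_auth_header():
    """Return headers carrying a GitHub OAuth token, if one is configured."""
    token = os.environ.get("GH_TOKEN")  # assumed token source
    if not token:
        return {}
    return {"Authorization": "token %s" % token}
```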
@@ -22,38 +22,15 @@ from urllib import urlopen
22 22 ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
23 23 PER_PAGE = 100
24 24
25 element_pat = re.compile(r'<(.+?)>')
26 rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
27
28 25 #-----------------------------------------------------------------------------
29 26 # Functions
30 27 #-----------------------------------------------------------------------------
31 28
32 def parse_link_header(headers):
33 link_s = headers.get('link', '')
34 urls = element_pat.findall(link_s)
35 rels = rel_pat.findall(link_s)
36 d = {}
37 for rel,url in zip(rels, urls):
38 d[rel] = url
39 return d
40
41 def get_paged_request(url):
42 """get a full list, handling APIv3's paging"""
43 results = []
44 while url:
45 print("fetching %s" % url, file=sys.stderr)
46 f = urlopen(url)
47 results.extend(json.load(f))
48 links = parse_link_header(f.headers)
49 url = links.get('next')
50 return results
51
52 29 def get_issues(project="ipython/ipython", state="closed", pulls=False):
53 30 """Get a list of the issues from the Github API."""
54 31 which = 'pulls' if pulls else 'issues'
55 32 url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (project, which, state, PER_PAGE)
56 return get_paged_request(url)
33 return get_paged_request(url, headers=make_auth_header())
57 34
58 35
59 36 def _parse_datetime(s):
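The hand-rolled `parse_link_header`/`get_paged_request` pair deleted above is replaced by a `gh_api` version that accepts auth headers. With `requests`, the Link-header parsing comes for free via `response.links`, so no regexes are needed; a sketch of what the replacement plausibly looks like (not the verbatim gh_api code):

```python
# Sketch of APIv3 paging with requests; response.links replaces the
# regex-based parse_link_header removed in this hunk. Auth handling
# (the headers argument) is assumed from the call sites in this diff.
from __future__ import print_function
import sys
import requests

def get_paged_request(url, headers=None):
    """Fetch every page of a GitHub APIv3 list endpoint."""
    results = []
    while url:
        print("fetching %s" % url, file=sys.stderr)
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        results.extend(response.json())
        # requests parses the Link header into response.links for us
        url = response.links.get('next', {}).get('url')
    return results
```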
@@ -74,28 +51,43 @@ def issues2dict(issues):
74 51
75 52 def is_pull_request(issue):
76 53 """Return True if the given issue is a pull request."""
77 return 'pull_request_url' in issue
54 return bool(issue.get('pull_request', {}).get('html_url', None))
55
56
57 def split_pulls(all_issues, project="ipython/ipython"):
58 """split a list of closed issues into non-PR Issues and Pull Requests"""
59 pulls = []
60 issues = []
61 for i in all_issues:
62 if is_pull_request(i):
63 pull = get_pull_request(project, i['number'], auth=True)
64 pulls.append(pull)
65 else:
66 issues.append(i)
67 return issues, pulls
78 68
79 69
80 def issues_closed_since(period=timedelta(days=365), project="ipython/ipython", pulls=False):
70
71 def issues_closed_since(period=timedelta(days=365), project="ipython/ipython"):
81 72 """Get all issues closed since a particular point in time. period
82 can either be a datetime object, or a timedelta object. In the
83 latter case, it is used as a time before the present."""
73 can either be a datetime object, or a timedelta object. In the
74 latter case, it is used as a time before the present.
75 """
84 76
85 which = 'pulls' if pulls else 'issues'
77 which = 'issues'
86 78
87 79 if isinstance(period, timedelta):
88 period = datetime.now() - period
89 url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, period.strftime(ISO8601), PER_PAGE)
90 allclosed = get_paged_request(url)
91 # allclosed = get_issues(project=project, state='closed', pulls=pulls, since=period)
92 filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > period]
80 since = datetime.now() - period
81 else:
82 since = period
83 url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
84 allclosed = get_paged_request(url, headers=make_auth_header())
93 85
94 # exclude rejected PRs
95 if pulls:
96 filtered = [ pr for pr in filtered if pr['merged_at'] ]
86 issues, pulls = split_pulls(allclosed, project=project)
87 issues = [i for i in issues if _parse_datetime(i['closed_at']) > since]
88 pulls = [p for p in pulls if p['merged_at'] and _parse_datetime(p['merged_at']) > since]
97 89
98 return filtered
90 return issues, pulls
99 91
100 92
101 93 def sorted_by_field(issues, field='closed_at', reverse=False):
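Rationale for the hunk above: the issues listing only marks pull requests with a `pull_request` stub and omits `merged_at`, so `split_pulls` re-fetches each PR individually via `get_pull_request` before the merge-date filter runs. That extra round of requests is exactly what the new cache absorbs. Example usage of the reworked two-list API (module name assumed from this diff; requires network access and configured gh_api credentials):

```python
# Example usage of the new (issues, pulls) return value.
from datetime import timedelta
from github_stats import issues_closed_since

issues, pulls = issues_closed_since(timedelta(days=30))
print("%i issues and %i merged pull requests closed in the last 30 days"
      % (len(issues), len(pulls)))
```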
@@ -137,15 +129,14 b' if __name__ == "__main__":'
137 129 if tag:
138 130 cmd = ['git', 'log', '-1', '--format=%ai', tag]
139 131 tagday, tz = check_output(cmd).strip().rsplit(' ', 1)
140 since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
132 since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")# - timedelta(days=30 * 6)
141 133 else:
142 134 since = datetime.now() - timedelta(days=days)
143 135
144 136 print("fetching GitHub stats since %s (tag: %s)" % (since, tag), file=sys.stderr)
145 137 # turn off to play interactively without redownloading, use %run -i
146 138 if 1:
147 issues = issues_closed_since(since, pulls=False)
148 pulls = issues_closed_since(since, pulls=True)
139 issues, pulls = issues_closed_since(since)
149 140
150 141 # For regular reports, it's nice to show them in reverse chronological order
151 142 issues = sorted_by_field(issues, reverse=True)
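On the tag-date handling above: `git log -1 --format=%ai <tag>` prints a date such as `2012-06-30 13:05:49 -0700`, and the `rsplit(' ', 1)` peels off the timezone offset so `strptime` can parse the rest. A standalone sketch (the tag name is illustrative; the `decode` is added for Python 3, while the original script targets Python 2):

```python
# Sketch of the tag-date parsing in the __main__ block; tag is hypothetical.
from datetime import datetime
from subprocess import check_output

tag = "rel-0.13"  # illustrative tag name
out = check_output(['git', 'log', '-1', '--format=%ai', tag])
tagday, tz = out.decode('utf-8').strip().rsplit(' ', 1)
since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
print(since, tz)  # e.g. 2012-06-30 13:05:49 -0700
```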