Show More
@@ -1,168 +1,161 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.feed |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Feed controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 23, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | |
|
31 |
from beaker.cache import cache_region |
|
|
31 | from beaker.cache import cache_region | |
|
32 | 32 | from tg import response |
|
33 | 33 | from tg import tmpl_context as c |
|
34 | 34 | from tg.i18n import ugettext as _ |
|
35 | 35 | from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed |
|
36 | 36 | |
|
37 | 37 | from kallithea import CONFIG |
|
38 | 38 | from kallithea.lib import helpers as h |
|
39 | 39 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
40 | 40 | from kallithea.lib.base import BaseRepoController |
|
41 | 41 | from kallithea.lib.diffs import DiffProcessor |
|
42 | 42 | from kallithea.lib.utils2 import safe_int, safe_unicode, str2bool |
|
43 | from kallithea.model.db import CacheInvalidation | |
|
44 | 43 | |
|
45 | 44 | |
|
46 | 45 | log = logging.getLogger(__name__) |
|
47 | 46 | |
|
48 | 47 | |
|
# Metadata shared by both feed flavours: feed language code, and the
# feed TTL passed straight to webhelpers.feedgenerator (a string here;
# presumably minutes per the RSS <ttl> convention -- TODO confirm).
language = 'en-us'
ttl = "5"
|
51 | 50 | |
|
52 | 51 | |
|
class FeedController(BaseRepoController):
    """Serve per-repository changelog feeds.

    Atom and RSS share identical content; only the serialization class
    (``Atom1Feed`` vs ``Rss201rev2Feed``) differs, so the shared logic
    lives in :meth:`_generate_feed` and ``atom()``/``rss()`` are thin
    wrappers around it.
    """

    @LoginRequired(allow_default_user=True)
    @HasRepoPermissionLevelDecorator('read')
    def _before(self, *args, **kwargs):
        """Require at least 'read' repo access; anonymous access allowed."""
        super(FeedController, self)._before(*args, **kwargs)

    def _get_title(self, cs):
        """Return the feed item title: the changeset message, shortened."""
        return h.shorter(cs.message, 160)

    def __get_desc(self, cs):
        """Build the HTML description fragments for changeset ``cs``.

        Returns an iterable of unicode strings: an author/date line,
        branch/bookmark/tag lines, a changeset link, and a ``<pre>``
        section with the urlified commit message, a per-file change
        summary, and - when the ``rss_include_diff`` setting is true -
        the raw diff itself.
        """
        desc_msg = [(_('%s committed on %s')
                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
        # branches, tags, bookmarks
        for branch in cs.branches:
            desc_msg.append('branch: %s<br/>' % branch)
        for book in cs.bookmarks:
            desc_msg.append('bookmark: %s<br/>' % book)
        for tag in cs.tags:
            desc_msg.append('tag: %s<br/>' % tag)

        changes = []
        diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
        raw_diff = cs.diff()
        diff_processor = DiffProcessor(raw_diff,
                                       diff_limit=diff_limit,
                                       inline_diff=False)

        for st in diff_processor.parsed:
            st.update({'added': st['stats']['added'],
                       'removed': st['stats']['deleted']})
            changes.append('\n %(operation)s %(filename)s '
                           '(%(added)s lines added, %(removed)s lines removed)'
                           % st)
        if diff_processor.limited_diff:
            changes = changes + ['\n ' +
                                 _('Changeset was too big and was cut off...')]

        # rev link
        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                               revision=cs.raw_id)
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

        desc_msg.append('<pre>')
        desc_msg.append(h.urlify_text(cs.message))
        desc_msg.append('\n')
        desc_msg.extend(changes)
        if str2bool(CONFIG.get('rss_include_diff', False)):
            desc_msg.append('\n\n')
            desc_msg.append(raw_diff)
        desc_msg.append('</pre>')
        return map(safe_unicode, desc_msg)

    def _generate_feed(self, repo_name, feed_factory, kind):
        """Render (or fetch from cache) a changelog feed for ``repo_name``.

        ``feed_factory`` is the webhelpers feed class to instantiate
        (``Atom1Feed`` or ``Rss201rev2Feed``); ``kind`` ('ATOM'/'RSS') is
        part of the cache key, so both flavours are cached independently
        even though they share the '_get_feed_from_cache' cache namespace
        (exactly as the two previous copy-pasted implementations did).
        """

        @cache_region('long_term', '_get_feed_from_cache')
        def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
            feed = feed_factory(
                title=_('%s %s feed') % (c.site_name, repo_name),
                link=h.canonical_url('summary_home', repo_name=repo_name),
                description=_('Changes on %s repository') % repo_name,
                language=language,
                ttl=ttl
            )

            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                feed.add_item(title=self._get_title(cs),
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
                                                   revision=cs.raw_id),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                              )

            return feed.writeString('utf-8')

        # Set the content type on every request -- previously this was done
        # inside the cached closure, so it was skipped on cache hits.
        # ``mime_type`` is a class attribute of the feedgenerator classes.
        response.content_type = feed_factory.mime_type
        # The tip raw_id is part of the cache key, so the cached feed is
        # automatically superseded whenever new changesets arrive.
        return _get_feed_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))

    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""
        return self._generate_feed(repo_name, Atom1Feed, 'ATOM')

    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""
        return self._generate_feed(repo_name, Rss201rev2Feed, 'RSS')
@@ -1,219 +1,216 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.summary |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Summary controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 18, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import calendar |
|
29 | 29 | import itertools |
|
30 | 30 | import logging |
|
31 | 31 | import traceback |
|
32 | 32 | from datetime import date, timedelta |
|
33 | 33 | from time import mktime |
|
34 | 34 | |
|
35 |
from beaker.cache import cache_region |
|
|
35 | from beaker.cache import cache_region | |
|
36 | 36 | from tg import request |
|
37 | 37 | from tg import tmpl_context as c |
|
38 | 38 | from tg.i18n import ugettext as _ |
|
39 | 39 | from webob.exc import HTTPBadRequest |
|
40 | 40 | |
|
41 | 41 | from kallithea.config.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP |
|
42 | 42 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
43 | 43 | from kallithea.lib.base import BaseRepoController, jsonify, render |
|
44 | 44 | from kallithea.lib.celerylib.tasks import get_commits_stats |
|
45 | 45 | from kallithea.lib.compat import json |
|
46 | 46 | from kallithea.lib.markup_renderer import MarkupRenderer |
|
47 | 47 | from kallithea.lib.page import RepoPage |
|
48 | 48 | from kallithea.lib.utils2 import safe_int |
|
49 | 49 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
50 | 50 | from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError |
|
51 | 51 | from kallithea.lib.vcs.nodes import FileNode |
|
52 |
from kallithea.model.db import |
|
|
52 | from kallithea.model.db import Statistics | |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
# Candidate README file names to probe for, in priority order: every
# base-name/extension combination from kallithea.config.conf.
# NOTE(review): this assumes ALL_READMES and ALL_EXTS contain
# (text, priority) pairs -- x[.][0] is the text fragment, x[.][1] the
# numeric priority used for ordering; verify against config.conf.
README_FILES = [''.join([x[0][0], x[1][0]]) for x in
                sorted(list(itertools.product(ALL_READMES, ALL_EXTS)),
                       key=lambda y:y[0][1] + y[1][1])]
|
60 | 60 | |
|
61 | 61 | |
|
class SummaryController(BaseRepoController):
    """Repository summary page plus size and statistics endpoints."""

    def _get_stats(self):
        """Return the ``Statistics`` row for the current repo, or None.

        Shared by :meth:`index` and :meth:`statistics`, which previously
        duplicated this query inline.
        """
        return Statistics.query() \
            .filter(Statistics.repository == c.db_repo) \
            .scalar()

    def __get_readme_data(self, db_repo):
        """Return ``(rendered_html, filename)`` for the repo README.

        Probes README_FILES in priority order at the landing changeset and
        renders the first match; returns ``(None, None)`` when no README
        exists or the repository is empty.  The result is cached in the
        'long_term' beaker region; the cached tip raw_id is part of the
        cache key so new pushes supersede stale entries.
        """
        repo_name = db_repo.repo_name
        log.debug('Looking for README file')

        @cache_region('long_term', '_get_readme_from_cache')
        def _get_readme_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
            readme_data = None
            readme_file = None
            try:
                # gets the landing revision! or tip if fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...',
                                  readme_file)
                        readme_data = renderer.render(readme.content,
                                                      filename=f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                # Best-effort: log and fall through with (None, None).
                log.error(traceback.format_exc())
            except EmptyRepositoryError:
                pass

            return readme_data, readme_file

        kind = 'README'
        return _get_readme_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))

    @LoginRequired(allow_default_user=True)
    @HasRepoPermissionLevelDecorator('read')
    def index(self, repo_name):
        """Render the repository summary page (changesets, clone URLs,
        trending languages, README)."""
        p = safe_int(request.GET.get('page'), 1)
        size = safe_int(request.GET.get('size'), 10)
        collection = c.db_repo_scm_instance
        c.cs_pagination = RepoPage(collection, page=p, items_per_page=size)
        page_revisions = [x.raw_id for x in list(c.cs_pagination)]
        c.cs_comments = c.db_repo.get_comments(page_revisions)
        c.cs_statuses = c.db_repo.statuses(page_revisions)

        # SSH clone URL is only offered to logged-in users (and only when
        # SSH access is enabled).
        c.ssh_repo_url = None
        if request.authuser.is_default_user:
            username = None
        else:
            username = request.authuser.username
            if c.ssh_enabled:
                c.ssh_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_ssh_tmpl)

        c.clone_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=False, username=username)
        c.clone_repo_url_id = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=True, username=username)

        c.show_stats = bool(c.db_repo.enable_statistics)

        stats = self._get_stats()
        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = c.db_repo.enable_statistics is False
            lang_stats_d = json.loads(stats.languages)

            lang_stats = [(x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')})
                          for x, y in lang_stats_d.items()]
            # Most-used languages first; ties broken alphabetically.
            lang_stats.sort(key=lambda k: (-k[1]['count'], k[0]))

            c.trending_languages = lang_stats[:10]
        else:
            c.no_data = True
            c.trending_languages = []

        c.enable_downloads = c.db_repo.enable_downloads
        c.readme_data, c.readme_file = \
            self.__get_readme_data(c.db_repo)
        return render('summary/summary.html')

    @LoginRequired()
    @HasRepoPermissionLevelDecorator('read')
    @jsonify
    def repo_size(self, repo_name):
        """Return the repository disk size; XHR-only JSON endpoint."""
        if request.is_xhr:
            return c.db_repo._repo_size()
        else:
            raise HTTPBadRequest()

    @LoginRequired(allow_default_user=True)
    @HasRepoPermissionLevelDecorator('read')
    def statistics(self, repo_name):
        """Render the statistics page and (re)trigger stats collection."""
        if c.db_repo.enable_statistics:
            c.show_stats = True
            c.no_data_msg = _('No data ready yet')
        else:
            c.show_stats = False
            c.no_data_msg = _('Statistics are disabled for this repository')

        # Time window: tomorrow (so today is fully included) back one
        # month / one year.  monthrange is leap-year aware, unlike the
        # previously used calendar.mdays, which always reports 28 days
        # for February.
        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.monthrange(td.year, td.month)[1])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
        ts_max_y = mktime(td.timetuple())
        c.ts_min = ts_min_m
        c.ts_max = ts_max_y

        stats = self._get_stats()
        c.stats_percentage = 0
        if stats and stats.languages:
            c.no_data = c.db_repo.enable_statistics is False
            lang_stats_d = json.loads(stats.languages)
            c.commit_data = json.loads(stats.commit_activity)
            c.overview_data = json.loads(stats.commit_activity_combined)

            lang_stats = ((x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_d.items())

            # Sort by usage count; the previous key (the whole stats dict)
            # was meaningless and raises TypeError on Python 3.
            c.trending_languages = (
                sorted(lang_stats, reverse=True, key=lambda k: k[1]['count'])[:10]
            )
            last_rev = stats.stat_on_revision + 1
            c.repo_last_rev = c.db_repo_scm_instance.count() \
                if c.db_repo_scm_instance.revisions else 0
            if last_rev != 0 and c.repo_last_rev != 0:
                c.stats_percentage = '%.2f' % ((float(last_rev) /
                                                c.repo_last_rev) * 100)
        else:
            c.commit_data = {}
            c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]])
            c.trending_languages = {}
            c.no_data = True

        recurse_limit = 500  # don't recurse more than 500 times when parsing
        get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
        return render('summary/statistics.html')
General Comments 0
You need to be logged in to leave comments.
Login now