@@ -0,0 +1,368 @@ (new file)

# -*- coding: utf-8 -*-

# Copyright (C) 2011-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import string

from pyramid.view import view_config

from beaker.cache import cache_region


from rhodecode.controllers import utils

from rhodecode.apps._base import RepoAppView
from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
from rhodecode.lib import caches, helpers as h
from rhodecode.lib.helpers import RepoPage
from rhodecode.lib.utils2 import safe_str, safe_int
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
from rhodecode.lib.ext_json import json
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError
from rhodecode.model.db import Statistics, CacheKey, User
from rhodecode.model.meta import Session
from rhodecode.model.repo import ReadmeFinder
from rhodecode.model.scm import ScmModel

log = logging.getLogger(__name__)


class RepoSummaryView(RepoAppView):

    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)

        # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
        c.repo_info = self.db_repo
        c.rhodecode_repo = None
        if not c.repository_requirements_missing:
            c.rhodecode_repo = self.rhodecode_vcs_repo

        self._register_global_c(c)
        return c

    def _get_readme_data(self, db_repo, default_renderer):
        repo_name = db_repo.repo_name
        log.debug('Looking for README file')

        @cache_region('long_term')
        def _generate_readme(cache_key):
            readme_data = None
            readme_node = None
            readme_filename = None
            commit = self._get_landing_commit_or_none(db_repo)
            if commit:
                log.debug("Searching for a README file.")
                readme_node = ReadmeFinder(default_renderer).search(commit)
            if readme_node:
                relative_url = h.url('files_raw_home',
                                     repo_name=repo_name,
                                     revision=commit.raw_id,
                                     f_path=readme_node.path)
                readme_data = self._render_readme_or_none(
                    commit, readme_node, relative_url)
                readme_filename = readme_node.path
            return readme_data, readme_filename

        invalidator_context = CacheKey.repo_context_cache(
            _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)

        with invalidator_context as context:
            context.invalidate()
            computed = context.compute()

        return computed

    def _get_landing_commit_or_none(self, db_repo):
        log.debug("Getting the landing commit.")
        try:
            commit = db_repo.get_landing_commit()
            if not isinstance(commit, EmptyCommit):
                return commit
            else:
                log.debug("Repository is empty, no README to render.")
        except CommitError:
            log.exception(
                "Problem getting commit when trying to render the README.")

    def _render_readme_or_none(self, commit, readme_node, relative_url):
        log.debug(
            'Found README file `%s` rendering...', readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.content, filename=readme_node.path)
            if relative_url:
                return relative_links(html_source, relative_url)
            return html_source
        except Exception:
            log.exception(
                "Exception while trying to render the README")

    def _load_commits_context(self, c):
        p = safe_int(self.request.GET.get('page'), 1)
        size = safe_int(self.request.GET.get('size'), 10)

        def url_generator(**kw):
            query_params = {
                'size': size
            }
            query_params.update(kw)
            return h.route_path(
                'repo_summary_commits',
                repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)

        pre_load = ['author', 'branch', 'date', 'message']
        try:
            collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
        except EmptyRepositoryError:
            collection = self.rhodecode_vcs_repo

        c.repo_commits = RepoPage(
            collection, page=p, items_per_page=size, url=url_generator)
        page_ids = [x.raw_id for x in c.repo_commits]
        c.comments = self.db_repo.get_comments(page_ids)
        c.statuses = self.db_repo.statuses(page_ids)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary_commits', request_method='GET',
        renderer='rhodecode:templates/summary/summary_commits.mako')
    def summary_commits(self):
        c = self.load_default_context()
        self._load_commits_context(c)
        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    @view_config(
        route_name='repo_summary_slash', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    def summary(self):
        c = self.load_default_context()

        # Prepare the clone URL
        username = ''
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = safe_str(self._rhodecode_user.username)

        _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
        if '{repo}' in _def_clone_uri:
            _def_clone_uri_by_id = _def_clone_uri.replace(
                '{repo}', '_{repoid}')
        elif '{repoid}' in _def_clone_uri:
            _def_clone_uri_by_id = _def_clone_uri.replace(
                '_{repoid}', '{repo}')

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri)
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_by_id)

        # If enabled, get statistics data

        c.show_stats = bool(self.db_repo.enable_statistics)

        stats = Session().query(Statistics) \
            .filter(Statistics.repository == self.db_repo) \
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = False is self.db_repo.enable_statistics
            lang_stats_d = json.loads(stats.languages)

            # Sort first by decreasing count and second by the file extension,
            # so we have a consistent output.
            lang_stats_items = sorted(lang_stats_d.iteritems(),
                                      key=lambda k: (-k[1], k[0]))[:10]
            lang_stats = [(x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_items]

            c.trending_languages = json.dumps(lang_stats)
        else:
            c.no_data = True
            c.trending_languages = json.dumps({})

        scm_model = ScmModel()
        c.enable_downloads = self.db_repo.enable_downloads
        c.repository_followers = scm_model.get_followers(self.db_repo)
        c.repository_forks = scm_model.get_forks(self.db_repo)
        c.repository_is_user_following = scm_model.is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id)

        # first interaction with the VCS instance after here...
        if c.repository_requirements_missing:
            self.request.override_renderer = \
                'rhodecode:templates/summary/missing_requirements.mako'
            return self._get_template_context(c)

        c.readme_data, c.readme_file = \
            self._get_readme_data(self.db_repo, c.visual.default_renderer)

        # loads the summary commits template context
        self._load_commits_context(c)

        return self._get_template_context(c)

    def get_request_commit_id(self):
        return self.request.matchdict['commit_id']

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_stats', request_method='GET',
        renderer='json_ext')
    def repo_stats(self):
        commit_id = self.get_request_commit_id()

        _namespace = caches.get_repo_namespace_key(
            caches.SUMMARY_STATS, self.db_repo_name)
        show_stats = bool(self.db_repo.enable_statistics)
        cache_manager = caches.get_cache_manager(
            'repo_cache_long', _namespace)
        _cache_key = caches.compute_key_from_params(
            self.db_repo_name, commit_id, show_stats)

        def compute_stats():
            code_stats = {}
            size = 0
            try:
                scm_instance = self.db_repo.scm_instance()
                commit = scm_instance.get_commit(commit_id)

                for node in commit.get_filenodes_generator():
                    size += node.size
                    if not show_stats:
                        continue
                    ext = string.lower(node.extension)
                    ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
                    if ext_info:
                        if ext in code_stats:
                            code_stats[ext]['count'] += 1
                        else:
                            code_stats[ext] = {"count": 1, "desc": ext_info}
            except EmptyRepositoryError:
                pass
            return {'size': h.format_byte_size_binary(size),
                    'code_stats': code_stats}

        stats = cache_manager.get(_cache_key, createfunc=compute_stats)
        return stats

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_data(self):
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo
        refs_to_create = [
            (_("Branch"), repo.branches, 'branch'),
            (_("Tag"), repo.tags, 'tag'),
            (_("Bookmark"), repo.bookmarks, 'book'),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_changelog_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_changelog_data(self):
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo

        refs_to_create = [
            (_("Branches"), repo.branches, 'branch'),
            (_("Closed branches"), repo.branches_closed, 'branch_closed'),
            # TODO: enable when vcs can handle bookmarks filters
            # (_("Bookmarks"), repo.bookmarks, "book"),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data

    def _create_reference_data(self, repo, full_repo_name, refs_to_create):
        format_ref_id = utils.get_format_ref_id(repo)

        result = []
        for title, refs, ref_type in refs_to_create:
            if refs:
                result.append({
                    'text': title,
                    'children': self._create_reference_items(
                        repo, full_repo_name, refs, ref_type,
                        format_ref_id),
                })
        return result

    def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
                                format_ref_id):
        result = []
        is_svn = h.is_svn(repo)
        for ref_name, raw_id in refs.iteritems():
            files_url = self._create_files_url(
                repo, full_repo_name, ref_name, raw_id, is_svn)
            result.append({
                'text': ref_name,
                'id': format_ref_id(ref_name, raw_id),
                'raw_id': raw_id,
                'type': ref_type,
                'files_url': files_url,
            })
        return result

    def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
        use_commit_id = '/' in ref_name or is_svn
        return h.url(
            'files_home',
            repo_name=full_repo_name,
            f_path=ref_name if is_svn else '',
            revision=raw_id if use_commit_id else ref_name,
            at=ref_name)
NO CONTENT: new file 100644
@@ -1,350 +1,357 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import logging |
|
23 | 23 | from pylons import tmpl_context as c |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | 27 | from rhodecode.lib.utils import PartialRenderer |
|
28 | 28 | from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time |
|
29 | 29 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
30 | 30 | from rhodecode.lib.ext_json import json |
|
31 | 31 | from rhodecode.model import repo |
|
32 | 32 | from rhodecode.model import repo_group |
|
33 | 33 | from rhodecode.model.db import User |
|
34 | 34 | from rhodecode.model.scm import ScmModel |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | ADMIN_PREFIX = '/_admin' |
|
40 | 40 | STATIC_FILE_PREFIX = '/_static' |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | def add_route_with_slash(config,name, pattern, **kw): |
|
44 | 44 | config.add_route(name, pattern, **kw) |
|
45 | 45 | if not pattern.endswith('/'): |
|
46 | 46 | config.add_route(name + '_slash', pattern + '/', **kw) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | def get_format_ref_id(repo): |
|
50 | 50 | """Returns a `repo` specific reference formatter function""" |
|
51 | 51 | if h.is_svn(repo): |
|
52 | 52 | return _format_ref_id_svn |
|
53 | 53 | else: |
|
54 | 54 | return _format_ref_id |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def _format_ref_id(name, raw_id): |
|
58 | 58 | """Default formatting of a given reference `name`""" |
|
59 | 59 | return name |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def _format_ref_id_svn(name, raw_id): |
|
63 | 63 | """Special way of formatting a reference for Subversion including path""" |
|
64 | 64 | return '%s@%s' % (name, raw_id) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class TemplateArgs(StrictAttributeDict): |
|
68 | 68 | pass |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | class BaseAppView(object): |
|
72 | 72 | |
|
73 | 73 | def __init__(self, context, request): |
|
74 | 74 | self.request = request |
|
75 | 75 | self.context = context |
|
76 | 76 | self.session = request.session |
|
77 | 77 | self._rhodecode_user = request.user # auth user |
|
78 | 78 | self._rhodecode_db_user = self._rhodecode_user.get_instance() |
|
79 | 79 | self._maybe_needs_password_change( |
|
80 | 80 | request.matched_route.name, self._rhodecode_db_user) |
|
81 | 81 | |
|
82 | 82 | def _maybe_needs_password_change(self, view_name, user_obj): |
|
83 | 83 | log.debug('Checking if user %s needs password change on view %s', |
|
84 | 84 | user_obj, view_name) |
|
85 | 85 | skip_user_views = [ |
|
86 | 86 | 'logout', 'login', |
|
87 | 87 | 'my_account_password', 'my_account_password_update' |
|
88 | 88 | ] |
|
89 | 89 | |
|
90 | 90 | if not user_obj: |
|
91 | 91 | return |
|
92 | 92 | |
|
93 | 93 | if user_obj.username == User.DEFAULT_USER: |
|
94 | 94 | return |
|
95 | 95 | |
|
96 | 96 | now = time.time() |
|
97 | 97 | should_change = user_obj.user_data.get('force_password_change') |
|
98 | 98 | change_after = safe_int(should_change) or 0 |
|
99 | 99 | if should_change and now > change_after: |
|
100 | 100 | log.debug('User %s requires password change', user_obj) |
|
101 | 101 | h.flash('You are required to change your password', 'warning', |
|
102 | 102 | ignore_duplicate=True) |
|
103 | 103 | |
|
104 | 104 | if view_name not in skip_user_views: |
|
105 | 105 | raise HTTPFound( |
|
106 | 106 | self.request.route_path('my_account_password')) |
|
107 | 107 | |
|
108 | def _get_local_tmpl_context(self): | |
|
108 | def _get_local_tmpl_context(self, include_app_defaults=False): | |
|
109 | 109 | c = TemplateArgs() |
|
110 | 110 | c.auth_user = self.request.user |
|
111 | if include_app_defaults: | |
|
112 | # NOTE(marcink): after full pyramid migration include_app_defaults | |
|
113 | # should be turned on by default | |
|
114 | from rhodecode.lib.base import attach_context_attributes | |
|
115 | attach_context_attributes(c, self.request, self.request.user.user_id) | |
|
111 | 116 | return c |
|
112 | 117 | |
|
113 | 118 | def _register_global_c(self, tmpl_args): |
|
114 | 119 | """ |
|
115 | 120 | Registers attributes to pylons global `c` |
|
116 | 121 | """ |
|
117 | 122 | # TODO(marcink): remove once pyramid migration is finished |
|
118 | 123 | for k, v in tmpl_args.items(): |
|
119 | 124 | setattr(c, k, v) |
|
120 | 125 | |
|
121 | 126 | def _get_template_context(self, tmpl_args): |
|
122 | 127 | self._register_global_c(tmpl_args) |
|
123 | 128 | |
|
124 | 129 | local_tmpl_args = { |
|
125 | 130 | 'defaults': {}, |
|
126 | 131 | 'errors': {}, |
|
127 | 132 | } |
|
128 | 133 | local_tmpl_args.update(tmpl_args) |
|
129 | 134 | return local_tmpl_args |
|
130 | 135 | |
|
131 | 136 | def load_default_context(self): |
|
132 | 137 | """ |
|
133 | 138 | example: |
|
134 | 139 | |
|
135 | 140 | def load_default_context(self): |
|
136 | 141 | c = self._get_local_tmpl_context() |
|
137 | 142 | c.custom_var = 'foobar' |
|
138 | 143 | self._register_global_c(c) |
|
139 | 144 | return c |
|
140 | 145 | """ |
|
141 | 146 | raise NotImplementedError('Needs implementation in view class') |
|
142 | 147 | |
|
143 | 148 | |
|
144 | 149 | class RepoAppView(BaseAppView): |
|
145 | 150 | |
|
146 | 151 | def __init__(self, context, request): |
|
147 | 152 | super(RepoAppView, self).__init__(context, request) |
|
148 | 153 | self.db_repo = request.db_repo |
|
149 | 154 | self.db_repo_name = self.db_repo.repo_name |
|
150 | 155 | self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo) |
|
151 | 156 | |
|
152 | 157 | def _handle_missing_requirements(self, error): |
|
153 | 158 | log.error( |
|
154 | 159 | 'Requirements are missing for repository %s: %s', |
|
155 | 160 | self.db_repo_name, error.message) |
|
156 | 161 | |
|
157 | def _get_local_tmpl_context(self):
158 | c = super(RepoAppView, self)._get_local_tmpl_context(
162 | def _get_local_tmpl_context(self, include_app_defaults=False):
163 | c = super(RepoAppView, self)._get_local_tmpl_context(
164 | include_app_defaults=include_app_defaults)
165 |
|
159 | 166 | # register common vars for this type of view |
|
160 | 167 | c.rhodecode_db_repo = self.db_repo |
|
161 | 168 | c.repo_name = self.db_repo_name |
|
162 | 169 | c.repository_pull_requests = self.db_repo_pull_requests |
|
163 | 170 | |
|
164 | 171 | c.repository_requirements_missing = False |
|
165 | 172 | try: |
|
166 | 173 | self.rhodecode_vcs_repo = self.db_repo.scm_instance() |
|
167 | 174 | except RepositoryRequirementError as e: |
|
168 | 175 | c.repository_requirements_missing = True |
|
169 | 176 | self._handle_missing_requirements(e) |
|
170 | 177 | |
|
171 | 178 | return c |
|
172 | 179 | |
|
173 | 180 | |
|
174 | 181 | class DataGridAppView(object): |
|
175 | 182 | """ |
|
176 | 183 | Common class to have re-usable grid rendering components |
|
177 | 184 | """ |
|
178 | 185 | |
|
179 | 186 | def _extract_ordering(self, request, column_map=None): |
|
180 | 187 | column_map = column_map or {} |
|
181 | 188 | column_index = safe_int(request.GET.get('order[0][column]')) |
|
182 | 189 | order_dir = request.GET.get( |
|
183 | 190 | 'order[0][dir]', 'desc') |
|
184 | 191 | order_by = request.GET.get( |
|
185 | 192 | 'columns[%s][data][sort]' % column_index, 'name_raw') |
|
186 | 193 | |
|
187 | 194 | # translate datatable to DB columns |
|
188 | 195 | order_by = column_map.get(order_by) or order_by |
|
189 | 196 | |
|
190 | 197 | search_q = request.GET.get('search[value]') |
|
191 | 198 | return search_q, order_by, order_dir |
|
192 | 199 | |
|
193 | 200 | def _extract_chunk(self, request): |
|
194 | 201 | start = safe_int(request.GET.get('start'), 0) |
|
195 | 202 | length = safe_int(request.GET.get('length'), 25) |
|
196 | 203 | draw = safe_int(request.GET.get('draw')) |
|
197 | 204 | return draw, start, length |
|
198 | 205 | |
|
199 | 206 | |
|
200 | 207 | class BaseReferencesView(RepoAppView): |
|
201 | 208 | """ |
|
202 | 209 | Base for reference view for branches, tags and bookmarks. |
|
203 | 210 | """ |
|
204 | 211 | def load_default_context(self): |
|
205 | 212 | c = self._get_local_tmpl_context() |
|
206 | 213 | |
|
207 | 214 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
208 | 215 | c.repo_info = self.db_repo |
|
209 | 216 | |
|
210 | 217 | self._register_global_c(c) |
|
211 | 218 | return c |
|
212 | 219 | |
|
213 | 220 | def load_refs_context(self, ref_items, partials_template): |
|
214 | 221 | _render = PartialRenderer(partials_template) |
|
215 | 222 | _data = [] |
|
216 | 223 | pre_load = ["author", "date", "message"] |
|
217 | 224 | |
|
218 | 225 | is_svn = h.is_svn(self.rhodecode_vcs_repo) |
|
219 | 226 | format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo) |
|
220 | 227 | |
|
221 | 228 | for ref_name, commit_id in ref_items: |
|
222 | 229 | commit = self.rhodecode_vcs_repo.get_commit( |
|
223 | 230 | commit_id=commit_id, pre_load=pre_load) |
|
224 | 231 | |
|
225 | 232 | # TODO: johbo: Unify generation of reference links |
|
226 | 233 | use_commit_id = '/' in ref_name or is_svn |
|
227 | 234 | files_url = h.url( |
|
228 | 235 | 'files_home', |
|
229 | 236 | repo_name=c.repo_name, |
|
230 | 237 | f_path=ref_name if is_svn else '', |
|
231 | 238 | revision=commit_id if use_commit_id else ref_name, |
|
232 | 239 | at=ref_name) |
|
233 | 240 | |
|
234 | 241 | _data.append({ |
|
235 | 242 | "name": _render('name', ref_name, files_url), |
|
236 | 243 | "name_raw": ref_name, |
|
237 | 244 | "date": _render('date', commit.date), |
|
238 | 245 | "date_raw": datetime_to_time(commit.date), |
|
239 | 246 | "author": _render('author', commit.author), |
|
240 | 247 | "commit": _render( |
|
241 | 248 | 'commit', commit.message, commit.raw_id, commit.idx), |
|
242 | 249 | "commit_raw": commit.idx, |
|
243 | 250 | "compare": _render( |
|
244 | 251 | 'compare', format_ref_id(ref_name, commit.raw_id)), |
|
245 | 252 | }) |
|
246 | 253 | c.has_references = bool(_data) |
|
247 | 254 | c.data = json.dumps(_data) |
|
248 | 255 | |
|
249 | 256 | |
|
250 | 257 | class RepoRoutePredicate(object): |
|
251 | 258 | def __init__(self, val, config): |
|
252 | 259 | self.val = val |
|
253 | 260 | |
|
254 | 261 | def text(self): |
|
255 | 262 | return 'repo_route = %s' % self.val |
|
256 | 263 | |
|
257 | 264 | phash = text |
|
258 | 265 | |
|
259 | 266 | def __call__(self, info, request): |
|
260 | 267 | |
|
261 | 268 | if hasattr(request, 'vcs_call'): |
|
262 | 269 | # skip vcs calls |
|
263 | 270 | return |
|
264 | 271 | |
|
265 | 272 | repo_name = info['match']['repo_name'] |
|
266 | 273 | repo_model = repo.RepoModel() |
|
267 | 274 | by_name_match = repo_model.get_by_repo_name(repo_name, cache=True) |
|
268 | 275 | |
|
269 | 276 | if by_name_match: |
|
270 | 277 | # register this as request object we can re-use later |
|
271 | 278 | request.db_repo = by_name_match |
|
272 | 279 | return True |
|
273 | 280 | |
|
274 | 281 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
275 | 282 | if by_id_match: |
|
276 | 283 | request.db_repo = by_id_match |
|
277 | 284 | return True |
|
278 | 285 | |
|
279 | 286 | return False |
|
280 | 287 | |
|
281 | 288 | |
|
282 | 289 | class RepoTypeRoutePredicate(object): |
|
283 | 290 | def __init__(self, val, config): |
|
284 | 291 | self.val = val or ['hg', 'git', 'svn'] |
|
285 | 292 | |
|
286 | 293 | def text(self): |
|
287 | 294 | return 'repo_accepted_type = %s' % self.val |
|
288 | 295 | |
|
289 | 296 | phash = text |
|
290 | 297 | |
|
291 | 298 | def __call__(self, info, request): |
|
292 | 299 | if hasattr(request, 'vcs_call'): |
|
293 | 300 | # skip vcs calls |
|
294 | 301 | return |
|
295 | 302 | |
|
296 | 303 | rhodecode_db_repo = request.db_repo |
|
297 | 304 | |
|
298 | 305 | log.debug( |
|
299 | 306 | '%s checking repo type for %s in %s', |
|
300 | 307 | self.__class__.__name__, rhodecode_db_repo.repo_type, self.val) |
|
301 | 308 | |
|
302 | 309 | if rhodecode_db_repo.repo_type in self.val: |
|
303 | 310 | return True |
|
304 | 311 | else: |
|
305 | 312 | log.warning('Current view is not supported for repo type:%s', |
|
306 | 313 | rhodecode_db_repo.repo_type) |
|
307 | 314 | # |
|
308 | 315 | # h.flash(h.literal( |
|
309 | 316 | # _('Action not supported for %s.' % rhodecode_repo.alias)), |
|
310 | 317 | # category='warning') |
|
311 | 318 | # return redirect( |
|
312 |
# |
|
|
319 | # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name)) | |
|
313 | 320 | |
|
314 | 321 | return False |
|
315 | 322 | |
|
316 | 323 | |
|
317 | 324 | class RepoGroupRoutePredicate(object): |
|
318 | 325 | def __init__(self, val, config): |
|
319 | 326 | self.val = val |
|
320 | 327 | |
|
321 | 328 | def text(self): |
|
322 | 329 | return 'repo_group_route = %s' % self.val |
|
323 | 330 | |
|
324 | 331 | phash = text |
|
325 | 332 | |
|
326 | 333 | def __call__(self, info, request): |
|
327 | 334 | if hasattr(request, 'vcs_call'): |
|
328 | 335 | # skip vcs calls |
|
329 | 336 | return |
|
330 | 337 | |
|
331 | 338 | repo_group_name = info['match']['repo_group_name'] |
|
332 | 339 | repo_group_model = repo_group.RepoGroupModel() |
|
333 | 340 | by_name_match = repo_group_model.get_by_group_name( |
|
334 | 341 | repo_group_name, cache=True) |
|
335 | 342 | |
|
336 | 343 | if by_name_match: |
|
337 | 344 | # register this as request object we can re-use later |
|
338 | 345 | request.db_repo_group = by_name_match |
|
339 | 346 | return True |
|
340 | 347 | |
|
341 | 348 | return False |
|
342 | 349 | |
|
343 | 350 | |
|
344 | 351 | def includeme(config): |
|
345 | 352 | config.add_route_predicate( |
|
346 | 353 | 'repo_route', RepoRoutePredicate) |
|
347 | 354 | config.add_route_predicate( |
|
348 | 355 | 'repo_accepted_types', RepoTypeRoutePredicate) |
|
349 | 356 | config.add_route_predicate( |
|
350 | 357 | 'repo_group_route', RepoGroupRoutePredicate) |
@@ -1,304 +1,304 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from pyramid.view import view_config |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import BaseAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.auth import LoginRequired, NotAnonymous, \ |
|
29 | 29 | HasRepoGroupPermissionAnyDecorator |
|
30 | 30 | from rhodecode.lib.index import searcher_from_config |
|
31 | 31 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
32 | 32 | from rhodecode.lib.ext_json import json |
|
33 | 33 | from rhodecode.model.db import func, Repository, RepoGroup |
|
34 | 34 | from rhodecode.model.repo import RepoModel |
|
35 | 35 | from rhodecode.model.repo_group import RepoGroupModel |
|
36 | 36 | from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList |
|
37 | 37 | from rhodecode.model.user import UserModel |
|
38 | 38 | from rhodecode.model.user_group import UserGroupModel |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class HomeView(BaseAppView): |
|
44 | 44 | |
|
45 | 45 | def load_default_context(self): |
|
46 | 46 | c = self._get_local_tmpl_context() |
|
47 | 47 | c.user = c.auth_user.get_instance() |
|
48 | 48 | self._register_global_c(c) |
|
49 | 49 | return c |
|
50 | 50 | |
|
51 | 51 | @LoginRequired() |
|
52 | 52 | @view_config( |
|
53 | 53 | route_name='user_autocomplete_data', request_method='GET', |
|
54 | 54 | renderer='json_ext', xhr=True) |
|
55 | 55 | def user_autocomplete_data(self): |
|
56 | 56 | query = self.request.GET.get('query') |
|
57 | 57 | active = str2bool(self.request.GET.get('active') or True) |
|
58 | 58 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
59 | 59 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
60 | 60 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
61 | 61 | |
|
62 | 62 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
63 | 63 | query, active, include_groups) |
|
64 | 64 | |
|
65 | 65 | _users = UserModel().get_users( |
|
66 | 66 | name_contains=query, only_active=active) |
|
67 | 67 | |
|
68 | 68 | def maybe_skip_default_user(usr): |
|
69 | 69 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
70 | 70 | return False |
|
71 | 71 | return True |
|
72 | 72 | _users = filter(maybe_skip_default_user, _users) |
|
73 | 73 | |
|
74 | 74 | if include_groups: |
|
75 | 75 | # extend with user groups |
|
76 | 76 | _user_groups = UserGroupModel().get_user_groups( |
|
77 | 77 | name_contains=query, only_active=active, |
|
78 | 78 | expand_groups=expand_groups) |
|
79 | 79 | _users = _users + _user_groups |
|
80 | 80 | |
|
81 | 81 | return {'suggestions': _users} |
|
82 | 82 | |
|
83 | 83 | @LoginRequired() |
|
84 | 84 | @NotAnonymous() |
|
85 | 85 | @view_config( |
|
86 | 86 | route_name='user_group_autocomplete_data', request_method='GET', |
|
87 | 87 | renderer='json_ext', xhr=True) |
|
88 | 88 | def user_group_autocomplete_data(self): |
|
89 | 89 | query = self.request.GET.get('query') |
|
90 | 90 | active = str2bool(self.request.GET.get('active') or True) |
|
91 | 91 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
92 | 92 | |
|
93 | 93 | log.debug('generating user group list, query:%s, active:%s', |
|
94 | 94 | query, active) |
|
95 | 95 | |
|
96 | 96 | _user_groups = UserGroupModel().get_user_groups( |
|
97 | 97 | name_contains=query, only_active=active, |
|
98 | 98 | expand_groups=expand_groups) |
|
99 | 99 | _user_groups = _user_groups |
|
100 | 100 | |
|
101 | 101 | return {'suggestions': _user_groups} |
|
102 | 102 | |
|
103 | 103 | def _get_repo_list(self, name_contains=None, repo_type=None, limit=20): |
|
104 | 104 | query = Repository.query()\ |
|
105 | 105 | .order_by(func.length(Repository.repo_name))\ |
|
106 | 106 | .order_by(Repository.repo_name) |
|
107 | 107 | |
|
108 | 108 | if repo_type: |
|
109 | 109 | query = query.filter(Repository.repo_type == repo_type) |
|
110 | 110 | |
|
111 | 111 | if name_contains: |
|
112 | 112 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
113 | 113 | query = query.filter( |
|
114 | 114 | Repository.repo_name.ilike(ilike_expression)) |
|
115 | 115 | query = query.limit(limit) |
|
116 | 116 | |
|
117 | 117 | all_repos = query.all() |
|
118 | 118 | # permission checks are inside this function |
|
119 | 119 | repo_iter = ScmModel().get_repos(all_repos) |
|
120 | 120 | return [ |
|
121 | 121 | { |
|
122 | 122 | 'id': obj['name'], |
|
123 | 123 | 'text': obj['name'], |
|
124 | 124 | 'type': 'repo', |
|
125 | 125 | 'obj': obj['dbrepo'], |
|
126 | 'url': h.
126 | 'url': h.route_path('repo_summary', repo_name=obj['name'])
127 | 127 | } |
|
128 | 128 | for obj in repo_iter] |
|
129 | 129 | |
|
130 | 130 | def _get_repo_group_list(self, name_contains=None, limit=20): |
|
131 | 131 | query = RepoGroup.query()\ |
|
132 | 132 | .order_by(func.length(RepoGroup.group_name))\ |
|
133 | 133 | .order_by(RepoGroup.group_name) |
|
134 | 134 | |
|
135 | 135 | if name_contains: |
|
136 | 136 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
137 | 137 | query = query.filter( |
|
138 | 138 | RepoGroup.group_name.ilike(ilike_expression)) |
|
139 | 139 | query = query.limit(limit) |
|
140 | 140 | |
|
141 | 141 | all_groups = query.all() |
|
142 | 142 | repo_groups_iter = ScmModel().get_repo_groups(all_groups) |
|
143 | 143 | return [ |
|
144 | 144 | { |
|
145 | 145 | 'id': obj.group_name, |
|
146 | 146 | 'text': obj.group_name, |
|
147 | 147 | 'type': 'group', |
|
148 | 148 | 'obj': {}, |
|
149 | 149 | 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name) |
|
150 | 150 | } |
|
151 | 151 | for obj in repo_groups_iter] |
|
152 | 152 | |
|
153 | 153 | def _get_hash_commit_list(self, auth_user, hash_starts_with=None): |
|
154 | 154 | if not hash_starts_with or len(hash_starts_with) < 3: |
|
155 | 155 | return [] |
|
156 | 156 | |
|
157 | 157 | commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) |
|
158 | 158 | |
|
159 | 159 | if len(commit_hashes) != 1: |
|
160 | 160 | return [] |
|
161 | 161 | |
|
162 | 162 | commit_hash_prefix = commit_hashes[0] |
|
163 | 163 | |
|
164 | 164 | searcher = searcher_from_config(self.request.registry.settings) |
|
165 | 165 | result = searcher.search( |
|
166 | 166 | 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user, |
|
167 | 167 | raise_on_exc=False) |
|
168 | 168 | |
|
169 | 169 | return [ |
|
170 | 170 | { |
|
171 | 171 | 'id': entry['commit_id'], |
|
172 | 172 | 'text': entry['commit_id'], |
|
173 | 173 | 'type': 'commit', |
|
174 | 174 | 'obj': {'repo': entry['repository']}, |
|
175 | 175 | 'url': h.url('changeset_home', |
|
176 | 176 | repo_name=entry['repository'], |
|
177 | 177 | revision=entry['commit_id']) |
|
178 | 178 | } |
|
179 | 179 | for entry in result['results']] |
|
180 | 180 | |
|
181 | 181 | @LoginRequired() |
|
182 | 182 | @view_config( |
|
183 | 183 | route_name='repo_list_data', request_method='GET', |
|
184 | 184 | renderer='json_ext', xhr=True) |
|
185 | 185 | def repo_list_data(self): |
|
186 | 186 | _ = self.request.translate |
|
187 | 187 | |
|
188 | 188 | query = self.request.GET.get('query') |
|
189 | 189 | repo_type = self.request.GET.get('repo_type') |
|
190 | 190 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
191 | 191 | query, repo_type) |
|
192 | 192 | |
|
193 | 193 | res = [] |
|
194 | 194 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
195 | 195 | if repos: |
|
196 | 196 | res.append({ |
|
197 | 197 | 'text': _('Repositories'), |
|
198 | 198 | 'children': repos |
|
199 | 199 | }) |
|
200 | 200 | |
|
201 | 201 | data = { |
|
202 | 202 | 'more': False, |
|
203 | 203 | 'results': res |
|
204 | 204 | } |
|
205 | 205 | return data |
|
206 | 206 | |
|
207 | 207 | @LoginRequired() |
|
208 | 208 | @view_config( |
|
209 | 209 | route_name='goto_switcher_data', request_method='GET', |
|
210 | 210 | renderer='json_ext', xhr=True) |
|
211 | 211 | def goto_switcher_data(self): |
|
212 | 212 | c = self.load_default_context() |
|
213 | 213 | |
|
214 | 214 | _ = self.request.translate |
|
215 | 215 | |
|
216 | 216 | query = self.request.GET.get('query') |
|
217 | 217 | log.debug('generating goto switcher list, query %s', query) |
|
218 | 218 | |
|
219 | 219 | res = [] |
|
220 | 220 | repo_groups = self._get_repo_group_list(query) |
|
221 | 221 | if repo_groups: |
|
222 | 222 | res.append({ |
|
223 | 223 | 'text': _('Groups'), |
|
224 | 224 | 'children': repo_groups |
|
225 | 225 | }) |
|
226 | 226 | |
|
227 | 227 | repos = self._get_repo_list(query) |
|
228 | 228 | if repos: |
|
229 | 229 | res.append({ |
|
230 | 230 | 'text': _('Repositories'), |
|
231 | 231 | 'children': repos |
|
232 | 232 | }) |
|
233 | 233 | |
|
234 | 234 | commits = self._get_hash_commit_list(c.auth_user, query) |
|
235 | 235 | if commits: |
|
236 | 236 | unique_repos = {} |
|
237 | 237 | for commit in commits: |
|
238 | 238 | unique_repos.setdefault(commit['obj']['repo'], [] |
|
239 | 239 | ).append(commit) |
|
240 | 240 | |
|
241 | 241 | for repo in unique_repos: |
|
242 | 242 | res.append({ |
|
243 | 243 | 'text': _('Commits in %(repo)s') % {'repo': repo}, |
|
244 | 244 | 'children': unique_repos[repo] |
|
245 | 245 | }) |
|
246 | 246 | |
|
247 | 247 | data = { |
|
248 | 248 | 'more': False, |
|
249 | 249 | 'results': res |
|
250 | 250 | } |
|
251 | 251 | return data |
|
252 | 252 | |
|
253 | 253 | def _get_groups_and_repos(self, repo_group_id=None): |
|
254 | 254 | # repo groups groups |
|
255 | 255 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
256 | 256 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
257 | 257 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
258 | 258 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
259 | 259 | repo_group_list=repo_group_list_acl, admin=False) |
|
260 | 260 | |
|
261 | 261 | # repositories |
|
262 | 262 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
263 | 263 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
264 | 264 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
265 | 265 | repo_data = RepoModel().get_repos_as_dict( |
|
266 | 266 | repo_list=repo_list_acl, admin=False) |
|
267 | 267 | |
|
268 | 268 | return repo_data, repo_group_data |
|
269 | 269 | |
|
270 | 270 | @LoginRequired() |
|
271 | 271 | @view_config( |
|
272 | 272 | route_name='home', request_method='GET', |
|
273 | 273 | renderer='rhodecode:templates/index.mako') |
|
274 | 274 | def main_page(self): |
|
275 | 275 | c = self.load_default_context() |
|
276 | 276 | c.repo_group = None |
|
277 | 277 | |
|
278 | 278 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
279 | 279 | # json used to render the grids |
|
280 | 280 | c.repos_data = json.dumps(repo_data) |
|
281 | 281 | c.repo_groups_data = json.dumps(repo_group_data) |
|
282 | 282 | |
|
283 | 283 | return self._get_template_context(c) |
|
284 | 284 | |
|
285 | 285 | @LoginRequired() |
|
286 | 286 | @HasRepoGroupPermissionAnyDecorator( |
|
287 | 287 | 'group.read', 'group.write', 'group.admin') |
|
288 | 288 | @view_config( |
|
289 | 289 | route_name='repo_group_home', request_method='GET', |
|
290 | 290 | renderer='rhodecode:templates/index_repo_group.mako') |
|
291 | 291 | @view_config( |
|
292 | 292 | route_name='repo_group_home_slash', request_method='GET', |
|
293 | 293 | renderer='rhodecode:templates/index_repo_group.mako') |
|
294 | 294 | def repo_group_main_page(self): |
|
295 | 295 | c = self.load_default_context() |
|
296 | 296 | c.repo_group = self.request.db_repo_group |
|
297 | 297 | repo_data, repo_group_data = self._get_groups_and_repos( |
|
298 | 298 | c.repo_group.group_id) |
|
299 | 299 | |
|
300 | 300 | # json used to render the grids |
|
301 | 301 | c.repos_data = json.dumps(repo_data) |
|
302 | 302 | c.repo_groups_data = json.dumps(repo_group_data) |
|
303 | 303 | |
|
304 | 304 | return self._get_template_context(c) |
@@ -1,133 +1,148 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | from rhodecode.apps._base import add_route_with_slash | |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | def includeme(config): |
|
23 | 24 | |
|
24 | 25 | # Summary |
|
25 | config.add_route( | |
|
26 | name='repo_summary', | |
|
27 | pattern='/{repo_name:.*?[^/]}', repo_route=True) | |
|
28 | ||
|
26 | # NOTE(marcink): one additional route is defined in very bottom, catch | |
|
27 | # all pattern | |
|
29 | 28 | config.add_route( |
|
30 | 29 | name='repo_summary_explicit', |
|
31 | 30 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
31 | config.add_route( | |
|
32 | name='repo_summary_commits', | |
|
33 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) | |
|
34 | ||
|
35 | # refs data | |
|
36 | config.add_route( | |
|
37 | name='repo_refs_data', | |
|
38 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) | |
|
39 | ||
|
40 | config.add_route( | |
|
41 | name='repo_refs_changelog_data', | |
|
42 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) | |
|
43 | ||
|
44 | config.add_route( | |
|
45 | name='repo_stats', | |
|
46 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) | |
|
32 | 47 | |
|
33 | 48 | # Tags |
|
34 | 49 | config.add_route( |
|
35 | 50 | name='tags_home', |
|
36 | 51 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
37 | 52 | |
|
38 | 53 | # Branches |
|
39 | 54 | config.add_route( |
|
40 | 55 | name='branches_home', |
|
41 | 56 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
42 | 57 | |
|
43 | # Bookmarks | |
|
44 | 58 | config.add_route( |
|
45 | 59 | name='bookmarks_home', |
|
46 | 60 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
47 | 61 | |
|
48 | 62 | # Pull Requests |
|
49 | 63 | config.add_route( |
|
50 | 64 | name='pullrequest_show', |
|
51 | 65 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}', |
|
52 | 66 | repo_route=True) |
|
53 | 67 | |
|
54 | 68 | config.add_route( |
|
55 | 69 | name='pullrequest_show_all', |
|
56 | 70 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
57 | 71 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
58 | 72 | |
|
59 | 73 | config.add_route( |
|
60 | 74 | name='pullrequest_show_all_data', |
|
61 | 75 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
62 | 76 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
63 | 77 | |
|
64 | 78 | # Settings |
|
65 | 79 | config.add_route( |
|
66 | 80 | name='edit_repo', |
|
67 | 81 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
68 | 82 | |
|
69 | 83 | # Settings advanced |
|
70 | 84 | config.add_route( |
|
71 | 85 | name='edit_repo_advanced', |
|
72 | 86 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
73 | 87 | config.add_route( |
|
74 | 88 | name='edit_repo_advanced_delete', |
|
75 | 89 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
76 | 90 | config.add_route( |
|
77 | 91 | name='edit_repo_advanced_locking', |
|
78 | 92 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
79 | 93 | config.add_route( |
|
80 | 94 | name='edit_repo_advanced_journal', |
|
81 | 95 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
82 | 96 | config.add_route( |
|
83 | 97 | name='edit_repo_advanced_fork', |
|
84 | 98 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
85 | 99 | |
|
86 | 100 | # Caches |
|
87 | 101 | config.add_route( |
|
88 | 102 | name='edit_repo_caches', |
|
89 | 103 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
90 | 104 | |
|
91 | 105 | # Permissions |
|
92 | 106 | config.add_route( |
|
93 | 107 | name='edit_repo_perms', |
|
94 | 108 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
95 | 109 | |
|
96 | 110 | # Repo Review Rules |
|
97 | 111 | config.add_route( |
|
98 | 112 | name='repo_reviewers', |
|
99 | 113 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
100 | 114 | |
|
101 | 115 | config.add_route( |
|
102 | 116 | name='repo_default_reviewers_data', |
|
103 | 117 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
104 | 118 | |
|
105 | 119 | # Maintenance |
|
106 | 120 | config.add_route( |
|
107 | 121 | name='repo_maintenance', |
|
108 | 122 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
109 | 123 | |
|
110 | 124 | config.add_route( |
|
111 | 125 | name='repo_maintenance_execute', |
|
112 | 126 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
113 | 127 | |
|
114 | 128 | # Strip |
|
115 | 129 | config.add_route( |
|
116 | 130 | name='strip', |
|
117 | 131 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
118 | 132 | |
|
119 | 133 | config.add_route( |
|
120 | 134 | name='strip_check', |
|
121 | 135 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
122 | 136 | |
|
123 | 137 | config.add_route( |
|
124 | 138 | name='strip_execute', |
|
125 | 139 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
126 | 140 | |
|
127 | 141 | # NOTE(marcink): needs to be at the end for catch-all |
|
128 | # config.add_route( | |
|
129 | # name='repo_summary', | |
|
130 | # pattern='/{repo_name:.*?[^/]}', repo_route=True) | |
|
142 | add_route_with_slash( | |
|
143 | config, | |
|
144 | name='repo_summary', | |
|
145 | pattern='/{repo_name:.*?[^/]}', repo_route=True) | |
|
131 | 146 | |
|
132 | 147 | # Scan module for configuration decorators. |
|
133 | 148 | config.scan() |
@@ -1,516 +1,494 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | from rhodecode.
26 | from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.compat import OrderedDict |
|
29 | from rhodecode.lib.utils2 import AttributeDict | |
|
29 | 30 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
30 | 31 | from rhodecode.model.db import Repository |
|
31 | 32 | from rhodecode.model.meta import Session |
|
32 | 33 | from rhodecode.model.repo import RepoModel |
|
33 | 34 | from rhodecode.model.scm import ScmModel |
|
34 | from rhodecode.tests import (
35 | TestController, url, HG_REPO, assert_session_flash)
35 | from rhodecode.tests import assert_session_flash
|
36 | 36 | from rhodecode.tests.fixture import Fixture |
|
37 | 37 | from rhodecode.tests.utils import AssertResponse, repo_on_filesystem |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | fixture = Fixture() |
|
41 | 41 | |
|
42 | 42 | |
|
43 | class TestSummaryController(TestController): | |
|
44 | def test_index(self, backend, http_host_only_stub): | |
|
45 | self.log_user() | |
|
43 | def route_path(name, params=None, **kwargs): | |
|
44 | import urllib | |
|
45 | ||
|
46 | base_url = { | |
|
47 | 'repo_summary': '/{repo_name}', | |
|
48 | 'repo_stats': '/{repo_name}/repo_stats/{commit_id}', | |
|
49 | 'repo_refs_data': '/{repo_name}/refs-data', | |
|
50 | 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog' | |
|
51 | ||
|
52 | }[name].format(**kwargs) | |
|
53 | ||
|
54 | if params: | |
|
55 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
56 | return base_url | |
|
57 | ||
|
58 | ||
|
59 | @pytest.mark.usefixtures('app') | |
|
60 | class TestSummaryView(object): | |
|
61 | def test_index(self, autologin_user, backend, http_host_only_stub): | |
|
46 | 62 | repo_id = backend.repo.repo_id |
|
47 | 63 | repo_name = backend.repo_name |
|
48 | 64 | with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', |
|
49 | 65 | return_value=False): |
|
50 | response = self.app.get(
66 | response = self.app.get(
67 | route_path('repo_summary', repo_name=repo_name))
|
51 | 68 | |
|
52 | 69 | # repo type |
|
53 | 70 | response.mustcontain( |
|
54 | 71 | '<i class="icon-%s">' % (backend.alias, ) |
|
55 | 72 | ) |
|
56 | 73 | # public/private |
|
57 | 74 | response.mustcontain( |
|
58 | 75 | """<i class="icon-unlock-alt">""" |
|
59 | 76 | ) |
|
60 | 77 | |
|
61 | 78 | # clone url... |
|
62 | 79 | response.mustcontain( |
|
63 | 80 | 'id="clone_url" readonly="readonly"' |
|
64 | 81 | ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, )) |
|
65 | 82 | response.mustcontain( |
|
66 | 83 | 'id="clone_url_id" readonly="readonly"' |
|
67 | 84 | ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, )) |
|
68 | 85 | |
|
69 | def test_index_svn_without_proxy(
70 | self.log_user()
86 | def test_index_svn_without_proxy(
87 | self, autologin_user, backend_svn, http_host_only_stub):
|
71 | 88 | repo_id = backend_svn.repo.repo_id |
|
72 | 89 | repo_name = backend_svn.repo_name |
|
73 | response = self.app.get(
90 | response = self.app.get(route_path('repo_summary', repo_name=repo_name))
|
74 | 91 | # clone url... |
|
75 | 92 | response.mustcontain( |
|
76 | 93 | 'id="clone_url" disabled' |
|
77 | 94 | ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, )) |
|
78 | 95 | response.mustcontain( |
|
79 | 96 | 'id="clone_url_id" disabled' |
|
80 | 97 | ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, )) |
|
81 | 98 | |
|
82 | def test_index_with_trailing_slash(
83 | http_host_only_stub):
99 | def test_index_with_trailing_slash(
100 | self, autologin_user, backend, http_host_only_stub):
101 |
|
84 | 102 | repo_id = backend.repo.repo_id |
|
85 | 103 | repo_name = backend.repo_name |
|
86 | 104 | with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', |
|
87 | 105 | return_value=False): |
|
88 | 106 | response = self.app.get( |
|
89 |
107 | route_path('repo_summary', repo_name=repo_name) + '/',
|
90 | 108 | status=200) |
|
91 | 109 | |
|
92 | 110 | # clone url... |
|
93 | 111 | response.mustcontain( |
|
94 | 112 | 'id="clone_url" readonly="readonly"' |
|
95 | 113 | ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, )) |
|
96 | 114 | response.mustcontain( |
|
97 | 115 | 'id="clone_url_id" readonly="readonly"' |
|
98 | 116 | ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, )) |
|
99 | 117 | |
|
100 | def test_index_by_id(self, backend): | |
|
101 | self.log_user() | |
|
118 | def test_index_by_id(self, autologin_user, backend): | |
|
102 | 119 | repo_id = backend.repo.repo_id |
|
103 | response = self.app.get(
104 | 'summary
120 | response = self.app.get(
121 | route_path('repo_summary', repo_name='_%s' % (repo_id,)))
|
105 | 122 | |
|
106 | 123 | # repo type |
|
107 | 124 | response.mustcontain( |
|
108 | 125 | '<i class="icon-%s">' % (backend.alias, ) |
|
109 | 126 | ) |
|
110 | 127 | # public/private |
|
111 | 128 | response.mustcontain( |
|
112 | 129 | """<i class="icon-unlock-alt">""" |
|
113 | 130 | ) |
|
114 | 131 | |
|
115 | def test_index_by_repo_having_id_path_in_name_hg(self): | |
|
116 | self.log_user() | |
|
132 | def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user): | |
|
117 | 133 | fixture.create_repo(name='repo_1') |
|
118 | response = self.app.get( |
|
|
134 | response = self.app.get(route_path('repo_summary', repo_name='repo_1')) | |
|
119 | 135 | |
|
120 | 136 | try: |
|
121 | 137 | response.mustcontain("repo_1") |
|
122 | 138 | finally: |
|
123 | 139 | RepoModel().delete(Repository.get_by_repo_name('repo_1')) |
|
124 | 140 | Session().commit() |
|
125 | 141 | |
|
126 | def test_index_with_anonymous_access_disabled( |
|
|
127 | with fixture.anon_access(False): | |
|
128 | response = self.app.get(url('summary_home', repo_name=HG_REPO), | |
|
129 | status=302) | |
|
130 |
|
|
|
142 | def test_index_with_anonymous_access_disabled( | |
|
143 | self, backend, disable_anonymous_user): | |
|
144 | response = self.app.get( | |
|
145 | route_path('repo_summary', repo_name=backend.repo_name), status=302) | |
|
146 | assert 'login' in response.location | |
|
131 | 147 | |
|
132 | 148 | def _enable_stats(self, repo): |
|
133 | 149 | r = Repository.get_by_repo_name(repo) |
|
134 | 150 | r.enable_statistics = True |
|
135 | 151 | Session().add(r) |
|
136 | 152 | Session().commit() |
|
137 | 153 | |
|
138 | 154 | expected_trending = { |
|
139 | 155 | 'hg': { |
|
140 | 156 | "py": {"count": 68, "desc": ["Python"]}, |
|
141 | 157 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
142 | 158 | "css": {"count": 2, "desc": ["Css"]}, |
|
143 | 159 | "sh": {"count": 2, "desc": ["Bash"]}, |
|
144 | 160 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
145 | 161 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
146 | 162 | "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]}, |
|
147 | 163 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
148 | 164 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
149 | 165 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]} |
|
150 | 166 | }, |
|
151 | 167 | 'git': { |
|
152 | 168 | "py": {"count": 68, "desc": ["Python"]}, |
|
153 | 169 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
154 | 170 | "css": {"count": 2, "desc": ["Css"]}, |
|
155 | 171 | "sh": {"count": 2, "desc": ["Bash"]}, |
|
156 | 172 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
157 | 173 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
158 | 174 | "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]}, |
|
159 | 175 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
160 | 176 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
161 | 177 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]} |
|
162 | 178 | }, |
|
163 | 179 | 'svn': { |
|
164 | 180 | "py": {"count": 75, "desc": ["Python"]}, |
|
165 | 181 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
166 | 182 | "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]}, |
|
167 | 183 | "css": {"count": 2, "desc": ["Css"]}, |
|
168 | 184 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
169 | 185 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
170 | 186 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
171 | 187 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
172 | 188 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}, |
|
173 | 189 | "sh": {"count": 1, "desc": ["Bash"]} |
|
174 | 190 | }, |
|
175 | 191 | } |
|
176 | 192 | |
|
177 | def test_repo_stats(self, backend, xhr_header): | |
|
178 | self.log_user() | |
|
193 | def test_repo_stats(self, autologin_user, backend, xhr_header): | |
|
179 | 194 | response = self.app.get( |
|
180 |
|
|
|
181 | repo_name=backend.repo_name, commit_id='tip'), | |
|
195 | route_path( | |
|
196 | 'repo_stats', repo_name=backend.repo_name, commit_id='tip'), | |
|
182 | 197 | extra_environ=xhr_header, |
|
183 | 198 | status=200) |
|
184 | 199 | assert re.match(r'6[\d\.]+ KiB', response.json['size']) |
|
185 | 200 | |
|
186 | def test_repo_stats_code_stats_enabled(self, backend, xhr_header): | |
|
187 | self.log_user() | |
|
201 | def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header): | |
|
188 | 202 | repo_name = backend.repo_name |
|
189 | 203 | |
|
190 | 204 | # codes stats |
|
191 | 205 | self._enable_stats(repo_name) |
|
192 | 206 | ScmModel().mark_for_invalidation(repo_name) |
|
193 | 207 | |
|
194 | 208 | response = self.app.get( |
|
195 |
|
|
|
196 | repo_name=backend.repo_name, commit_id='tip'), | |
|
209 | route_path( | |
|
210 | 'repo_stats', repo_name=backend.repo_name, commit_id='tip'), | |
|
197 | 211 | extra_environ=xhr_header, |
|
198 | 212 | status=200) |
|
199 | 213 | |
|
200 | 214 | expected_data = self.expected_trending[backend.alias] |
|
201 | 215 | returned_stats = response.json['code_stats'] |
|
202 | 216 | for k, v in expected_data.items(): |
|
203 | 217 | assert v == returned_stats[k] |
|
204 | 218 | |
|
205 | 219 | def test_repo_refs_data(self, backend): |
|
206 | 220 | response = self.app.get( |
|
207 |
|
|
|
221 | route_path('repo_refs_data', repo_name=backend.repo_name), | |
|
208 | 222 | status=200) |
|
209 | 223 | |
|
210 | 224 | # Ensure that there is the correct number of items in the result |
|
211 | 225 | repo = backend.repo.scm_instance() |
|
212 | 226 | data = response.json['results'] |
|
213 | 227 | items = sum(len(section['children']) for section in data) |
|
214 | 228 | repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks) |
|
215 | 229 | assert items == repo_refs |
|
216 | 230 | |
|
217 | 231 | def test_index_shows_missing_requirements_message( |
|
218 | 232 | self, backend, autologin_user): |
|
219 | 233 | repo_name = backend.repo_name |
|
220 | 234 | scm_patcher = mock.patch.object( |
|
221 | 235 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
222 | 236 | |
|
223 | 237 | with scm_patcher: |
|
224 | response = self.app.get( |
|
|
238 | response = self.app.get(route_path('repo_summary', repo_name=repo_name)) | |
|
225 | 239 | assert_response = AssertResponse(response) |
|
226 | 240 | assert_response.element_contains( |
|
227 | 241 | '.main .alert-warning strong', 'Missing requirements') |
|
228 | 242 | assert_response.element_contains( |
|
229 | 243 | '.main .alert-warning', |
|
230 | '
|
|
231 | '
|
|
244 | 'Commits cannot be displayed, because this repository ' | |
|
245 | 'uses one or more extensions, which was not enabled.') | |
|
232 | 246 | |
|
233 | 247 | def test_missing_requirements_page_does_not_contains_switch_to( |
|
234 | self, backend): | |
|
235 | self.log_user() | |
|
248 | self, autologin_user, backend): | |
|
236 | 249 | repo_name = backend.repo_name |
|
237 | 250 | scm_patcher = mock.patch.object( |
|
238 | 251 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
239 | 252 | |
|
240 | 253 | with scm_patcher: |
|
241 | response = self.app.get( |
|
|
254 | response = self.app.get(route_path('repo_summary', repo_name=repo_name)) | |
|
242 | 255 | response.mustcontain(no='Switch To') |
|
243 | 256 | |
|
244 | 257 | |
|
245 | @pytest.mark.usefixtures(' |
|
|
246 | class TestSwitcherReferenceData: | |
|
258 | @pytest.mark.usefixtures('app') | |
|
259 | class TestRepoLocation(object): | |
|
247 | 260 | |
|
248 | def test_creates_reference_urls_based_on_name(self): | |
|
249 | references = { | |
|
250 | 'name': 'commit_id', | |
|
251 | } | |
|
252 | controller = summary.SummaryController() | |
|
253 | is_svn = False | |
|
254 | result = controller._switcher_reference_data( | |
|
255 | 'repo_name', references, is_svn) | |
|
256 | expected_url = h.url( | |
|
257 | 'files_home', repo_name='repo_name', revision='name', | |
|
258 | at='name') | |
|
259 | assert result[0]['files_url'] == expected_url | |
|
261 | @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii']) | |
|
262 | def test_manual_delete(self, autologin_user, backend, suffix, csrf_token): | |
|
263 | repo = backend.create_repo(name_suffix=suffix) | |
|
264 | repo_name = repo.repo_name | |
|
265 | ||
|
266 | # delete from file system | |
|
267 | RepoModel()._delete_filesystem_repo(repo) | |
|
260 | 268 | |
|
261 | def test_urls_contain_commit_id_if_slash_in_name(self): | |
|
262 | references = { | |
|
263 | 'name/with/slash': 'commit_id', | |
|
264 | } | |
|
265 | controller = summary.SummaryController() | |
|
266 | is_svn = False | |
|
267 | result = controller._switcher_reference_data( | |
|
268 | 'repo_name', references, is_svn) | |
|
269 | expected_url = h.url( | |
|
270 | 'files_home', repo_name='repo_name', revision='commit_id', | |
|
271 | at='name/with/slash') | |
|
272 | assert result[0]['files_url'] == expected_url | |
|
269 | # test if the repo is still in the database | |
|
270 | new_repo = RepoModel().get_by_repo_name(repo_name) | |
|
271 | assert new_repo.repo_name == repo_name | |
|
273 | 272 | |
|
274 | def test_adds_reference_to_path_for_svn(self): | |
|
275 | references = { | |
|
276 | 'name/with/slash': 'commit_id', | |
|
277 | } | |
|
278 | controller = summary.SummaryController() | |
|
279 | is_svn = True | |
|
280 | result = controller._switcher_reference_data( | |
|
281 | 'repo_name', references, is_svn) | |
|
282 | expected_url = h.url( | |
|
283 | 'files_home', repo_name='repo_name', f_path='name/with/slash', | |
|
284 | revision='commit_id', at='name/with/slash') | |
|
285 | assert result[0]['files_url'] == expected_url | |
|
273 | # check if repo is not in the filesystem | |
|
274 | assert not repo_on_filesystem(repo_name) | |
|
275 | self.assert_repo_not_found_redirect(repo_name) | |
|
276 | ||
|
277 | def assert_repo_not_found_redirect(self, repo_name): | |
|
278 | # run the check page that triggers the other flash message | |
|
279 | response = self.app.get(h.url('repo_check_home', repo_name=repo_name)) | |
|
280 | assert_session_flash( | |
|
281 | response, 'The repository at %s cannot be located.' % repo_name) | |
|
286 | 282 | |
|
287 | 283 | |
|
288 | @pytest. |
|
|
289 | class TestCreateReferenceData: | |
|
284 | @pytest.fixture() | |
|
285 | def summary_view(context_stub, request_stub, user_util): | |
|
286 | """ | |
|
287 | Bootstrap view to test the view functions | |
|
288 | """ | |
|
289 | request_stub.matched_route = AttributeDict(name='test_view') | |
|
290 | ||
|
291 | request_stub.user = user_util.create_user().AuthUser | |
|
292 | request_stub.db_repo = user_util.create_repo() | |
|
293 | ||
|
294 | view = RepoSummaryView(context=context_stub, request=request_stub) | |
|
295 | return view | |
|
296 | ||
|
297 | ||
|
298 | @pytest.mark.usefixtures('app') | |
|
299 | class TestCreateReferenceData(object): | |
|
290 | 300 | |
|
291 | 301 | @pytest.fixture |
|
292 | 302 | def example_refs(self): |
|
293 | 303 | section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id'))) |
|
294 | 304 | example_refs = [ |
|
295 | 305 | ('section_1', section_1_refs, 't1'), |
|
296 | 306 | ('section_2', {'c': 'c_id'}, 't2'), |
|
297 | 307 | ] |
|
298 | 308 | return example_refs |
|
299 | 309 | |
|
300 | def test_generates_refs_based_on_commit_ids(self, example_refs): | |
|
310 | def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view): | |
|
301 | 311 | repo = mock.Mock() |
|
302 | 312 | repo.name = 'test-repo' |
|
303 | 313 | repo.alias = 'git' |
|
304 | 314 | full_repo_name = 'pytest-repo-group/' + repo.name |
|
305 | controller = summary.SummaryController() | |
|
306 | 315 | |
|
307 | result = |
|
|
316 | result = summary_view._create_reference_data( | |
|
308 | 317 | repo, full_repo_name, example_refs) |
|
309 | 318 | |
|
310 | 319 | expected_files_url = '/{}/files/'.format(full_repo_name) |
|
311 | 320 | expected_result = [ |
|
312 | 321 | { |
|
313 | 322 | 'children': [ |
|
314 | 323 | { |
|
315 | 324 | 'id': 'a', 'raw_id': 'a_id', 'text': 'a', 'type': 't1', |
|
316 | 325 | 'files_url': expected_files_url + 'a/?at=a', |
|
317 | 326 | }, |
|
318 | 327 | { |
|
319 | 328 | 'id': 'b', 'raw_id': 'b_id', 'text': 'b', 'type': 't1', |
|
320 | 329 | 'files_url': expected_files_url + 'b/?at=b', |
|
321 | 330 | } |
|
322 | 331 | ], |
|
323 | 332 | 'text': 'section_1' |
|
324 | 333 | }, |
|
325 | 334 | { |
|
326 | 335 | 'children': [ |
|
327 | 336 | { |
|
328 | 337 | 'id': 'c', 'raw_id': 'c_id', 'text': 'c', 'type': 't2', |
|
329 | 338 | 'files_url': expected_files_url + 'c/?at=c', |
|
330 | 339 | } |
|
331 | 340 | ], |
|
332 | 341 | 'text': 'section_2' |
|
333 | 342 | }] |
|
334 | 343 | assert result == expected_result |
|
335 | 344 | |
|
336 | def test_generates_refs_with_path_for_svn(self, example_refs): | |
|
345 | def test_generates_refs_with_path_for_svn(self, example_refs, summary_view): | |
|
337 | 346 | repo = mock.Mock() |
|
338 | 347 | repo.name = 'test-repo' |
|
339 | 348 | repo.alias = 'svn' |
|
340 | 349 | full_repo_name = 'pytest-repo-group/' + repo.name |
|
341 | controller = summary.SummaryController() | |
|
342 | result = |
|
|
350 | ||
|
351 | result = summary_view._create_reference_data( | |
|
343 | 352 | repo, full_repo_name, example_refs) |
|
344 | 353 | |
|
345 | 354 | expected_files_url = '/{}/files/'.format(full_repo_name) |
|
346 | 355 | expected_result = [ |
|
347 | 356 | { |
|
348 | 357 | 'children': [ |
|
349 | 358 | { |
|
350 | 359 | 'id': 'a@a_id', 'raw_id': 'a_id', |
|
351 | 360 | 'text': 'a', 'type': 't1', |
|
352 | 361 | 'files_url': expected_files_url + 'a_id/a?at=a', |
|
353 | 362 | }, |
|
354 | 363 | { |
|
355 | 364 | 'id': 'b@b_id', 'raw_id': 'b_id', |
|
356 | 365 | 'text': 'b', 'type': 't1', |
|
357 | 366 | 'files_url': expected_files_url + 'b_id/b?at=b', |
|
358 | 367 | } |
|
359 | 368 | ], |
|
360 | 369 | 'text': 'section_1' |
|
361 | 370 | }, |
|
362 | 371 | { |
|
363 | 372 | 'children': [ |
|
364 | 373 | { |
|
365 | 374 | 'id': 'c@c_id', 'raw_id': 'c_id', |
|
366 | 375 | 'text': 'c', 'type': 't2', |
|
367 | 376 | 'files_url': expected_files_url + 'c_id/c?at=c', |
|
368 | 377 | } |
|
369 | 378 | ], |
|
370 | 379 | 'text': 'section_2' |
|
371 | 380 | } |
|
372 | 381 | ] |
|
373 | 382 | assert result == expected_result |
|
374 | 383 | |
|
375 | 384 | |
|
376 | @pytest.mark.usefixtures("app") | |
|
377 | class TestRepoLocation: | |
|
378 | ||
|
379 | @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii']) | |
|
380 | def test_manual_delete(self, autologin_user, backend, suffix, csrf_token): | |
|
381 | repo = backend.create_repo(name_suffix=suffix) | |
|
382 | repo_name = repo.repo_name | |
|
383 | ||
|
384 | # delete from file system | |
|
385 | RepoModel()._delete_filesystem_repo(repo) | |
|
386 | ||
|
387 | # test if the repo is still in the database | |
|
388 | new_repo = RepoModel().get_by_repo_name(repo_name) | |
|
389 | assert new_repo.repo_name == repo_name | |
|
385 | class TestCreateFilesUrl(object): | |
|
390 | 386 | |
|
391 | # check if repo is not in the filesystem | |
|
392 | assert not repo_on_filesystem(repo_name) | |
|
393 | self.assert_repo_not_found_redirect(repo_name) | |
|
394 | ||
|
395 | def assert_repo_not_found_redirect(self, repo_name): | |
|
396 | # run the check page that triggers the other flash message | |
|
397 | response = self.app.get(url('repo_check_home', repo_name=repo_name)) | |
|
398 | assert_session_flash( | |
|
399 | response, 'The repository at %s cannot be located.' % repo_name) | |
|
400 | ||
|
401 | ||
|
402 | class TestCreateFilesUrl(object): | |
|
403 | def test_creates_non_svn_url(self): | |
|
404 | controller = summary.SummaryController() | |
|
387 | def test_creates_non_svn_url(self, summary_view): | |
|
405 | 388 | repo = mock.Mock() |
|
406 | 389 | repo.name = 'abcde' |
|
407 | 390 | full_repo_name = 'test-repo-group/' + repo.name |
|
408 | 391 | ref_name = 'branch1' |
|
409 | 392 | raw_id = 'deadbeef0123456789' |
|
410 | 393 | is_svn = False |
|
411 | 394 | |
|
412 | with mock.patch |
|
|
413 | result = |
|
|
395 | with mock.patch('rhodecode.lib.helpers.url') as url_mock: | |
|
396 | result = summary_view._create_files_url( | |
|
414 | 397 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
415 | 398 | url_mock.assert_called_once_with( |
|
416 | 399 | 'files_home', repo_name=full_repo_name, f_path='', |
|
417 | 400 | revision=ref_name, at=ref_name) |
|
418 | 401 | assert result == url_mock.return_value |
|
419 | 402 | |
|
420 | def test_creates_svn_url(self): | |
|
421 | controller = summary.SummaryController() | |
|
403 | def test_creates_svn_url(self, summary_view): | |
|
422 | 404 | repo = mock.Mock() |
|
423 | 405 | repo.name = 'abcde' |
|
424 | 406 | full_repo_name = 'test-repo-group/' + repo.name |
|
425 | 407 | ref_name = 'branch1' |
|
426 | 408 | raw_id = 'deadbeef0123456789' |
|
427 | 409 | is_svn = True |
|
428 | 410 | |
|
429 | with mock.patch |
|
|
430 | result = |
|
|
411 | with mock.patch('rhodecode.lib.helpers.url') as url_mock: | |
|
412 | result = summary_view._create_files_url( | |
|
431 | 413 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
432 | 414 | url_mock.assert_called_once_with( |
|
433 | 415 | 'files_home', repo_name=full_repo_name, f_path=ref_name, |
|
434 | 416 | revision=raw_id, at=ref_name) |
|
435 | 417 | assert result == url_mock.return_value |
|
436 | 418 | |
|
437 | def test_name_has_slashes(self): | |
|
438 | controller = summary.SummaryController() | |
|
419 | def test_name_has_slashes(self, summary_view): | |
|
439 | 420 | repo = mock.Mock() |
|
440 | 421 | repo.name = 'abcde' |
|
441 | 422 | full_repo_name = 'test-repo-group/' + repo.name |
|
442 | 423 | ref_name = 'branch1/branch2' |
|
443 | 424 | raw_id = 'deadbeef0123456789' |
|
444 | 425 | is_svn = False |
|
445 | 426 | |
|
446 | with mock.patch |
|
|
447 | result = |
|
|
427 | with mock.patch('rhodecode.lib.helpers.url') as url_mock: | |
|
428 | result = summary_view._create_files_url( | |
|
448 | 429 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
449 | 430 | url_mock.assert_called_once_with( |
|
450 | 431 | 'files_home', repo_name=full_repo_name, f_path='', revision=raw_id, |
|
451 | 432 | at=ref_name) |
|
452 | 433 | assert result == url_mock.return_value |
|
453 | 434 | |
|
454 | 435 | |
|
455 | 436 | class TestReferenceItems(object): |
|
456 | 437 | repo = mock.Mock() |
|
457 | 438 | repo.name = 'pytest-repo' |
|
458 | 439 | repo_full_name = 'pytest-repo-group/' + repo.name |
|
459 | 440 | ref_type = 'branch' |
|
460 | 441 | fake_url = '/abcde/' |
|
461 | 442 | |
|
462 | 443 | @staticmethod |
|
463 | 444 | def _format_function(name, id_): |
|
464 | 445 | return 'format_function_{}_{}'.format(name, id_) |
|
465 | 446 | |
|
466 | def test_creates_required_amount_of_items(self): | |
|
447 | def test_creates_required_amount_of_items(self, summary_view): | |
|
467 | 448 | amount = 100 |
|
468 | 449 | refs = { |
|
469 | 450 | 'ref{}'.format(i): '{0:040d}'.format(i) |
|
470 | 451 | for i in range(amount) |
|
471 | 452 | } |
|
472 | 453 | |
|
473 | controller = summary.SummaryController() | |
|
474 | ||
|
475 | url_patcher = mock.patch.object( | |
|
476 | controller, '_create_files_url') | |
|
477 | svn_patcher = mock.patch.object( | |
|
478 | summary.h, 'is_svn', return_value=False) | |
|
454 | url_patcher = mock.patch.object(summary_view, '_create_files_url') | |
|
455 | svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn', | |
|
456 | return_value=False) | |
|
479 | 457 | |
|
480 | 458 | with url_patcher as url_mock, svn_patcher: |
|
481 | result = |
|
|
459 | result = summary_view._create_reference_items( | |
|
482 | 460 | self.repo, self.repo_full_name, refs, self.ref_type, |
|
483 | 461 | self._format_function) |
|
484 | 462 | assert len(result) == amount |
|
485 | 463 | assert url_mock.call_count == amount |
|
486 | 464 | |
|
487 | def test_single_item_details(self): | |
|
465 | def test_single_item_details(self, summary_view): | |
|
488 | 466 | ref_name = 'ref1' |
|
489 | 467 | ref_id = 'deadbeef' |
|
490 | 468 | refs = { |
|
491 | 469 | ref_name: ref_id |
|
492 | 470 | } |
|
493 | 471 | |
|
494 | controller = summary.SummaryController() | |
|
472 | svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn', | |
|
473 | return_value=False) | |
|
474 | ||
|
495 | 475 | url_patcher = mock.patch.object( |
|
496 |
|
|
|
497 | svn_patcher = mock.patch.object( | |
|
498 | summary.h, 'is_svn', return_value=False) | |
|
476 | summary_view, '_create_files_url', return_value=self.fake_url) | |
|
499 | 477 | |
|
500 | 478 | with url_patcher as url_mock, svn_patcher: |
|
501 | result = |
|
|
479 | result = summary_view._create_reference_items( | |
|
502 | 480 | self.repo, self.repo_full_name, refs, self.ref_type, |
|
503 | 481 | self._format_function) |
|
504 | 482 | |
|
505 | 483 | url_mock.assert_called_once_with( |
|
506 | 484 | self.repo, self.repo_full_name, ref_name, ref_id, False) |
|
507 | 485 | expected_result = [ |
|
508 | 486 | { |
|
509 | 487 | 'text': ref_name, |
|
510 | 488 | 'id': self._format_function(ref_name, ref_id), |
|
511 | 489 | 'raw_id': ref_id, |
|
512 | 490 | 'type': self.ref_type, |
|
513 | 491 | 'files_url': self.fake_url |
|
514 | 492 | } |
|
515 | 493 | ] |
|
516 | 494 | assert result == expected_result |
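
The updated tests above drop the direct SummaryController() instantiation in favour of the summary_view fixture (built from context_stub/request_stub) and patch rhodecode.lib.helpers.url so no real routing setup is needed. A minimal sketch of that pattern, assuming those fixtures are available as shown in the diff; the test name and repository names below are illustrative only:

import mock

def test_files_url_for_plain_ref(summary_view):
    repo = mock.Mock()
    repo.name = 'example-repo'
    full_repo_name = 'example-group/' + repo.name
    # patch the url helper, as the tests above do, so no routing setup is required
    with mock.patch('rhodecode.lib.helpers.url') as url_mock:
        result = summary_view._create_files_url(
            repo, full_repo_name, 'branch1', 'deadbeef0123456789', False)
    # for non-svn repos the ref name doubles as the revision and f_path stays empty
    url_mock.assert_called_once_with(
        'files_home', repo_name=full_repo_name, f_path='',
        revision='branch1', at='branch1')
    assert result == url_mock.return_value
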
@@ -1,516 +1,521 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Pylons middleware initialization |
|
23 | 23 | """ |
|
24 | 24 | import logging |
|
25 | 25 | from collections import OrderedDict |
|
26 | 26 | |
|
27 | 27 | from paste.registry import RegistryManager |
|
28 | 28 | from paste.gzipper import make_gzip_middleware |
|
29 | 29 | from pylons.wsgiapp import PylonsApp |
|
30 | 30 | from pyramid.authorization import ACLAuthorizationPolicy |
|
31 | 31 | from pyramid.config import Configurator |
|
32 | 32 | from pyramid.settings import asbool, aslist |
|
33 | 33 | from pyramid.wsgi import wsgiapp |
|
34 | 34 | from pyramid.httpexceptions import ( |
|
35 | 35 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound) |
|
36 | 36 | from pyramid.events import ApplicationCreated |
|
37 | 37 | from pyramid.renderers import render_to_response |
|
38 | 38 | from routes.middleware import RoutesMiddleware |
|
39 | 39 | import routes.util |
|
40 | 40 | |
|
41 | 41 | import rhodecode |
|
42 | ||
|
42 | 43 | from rhodecode.model import meta |
|
43 | 44 | from rhodecode.config import patches |
|
44 | 45 | from rhodecode.config.routing import STATIC_FILE_PREFIX |
|
45 | 46 | from rhodecode.config.environment import ( |
|
46 | 47 | load_environment, load_pyramid_environment) |
|
48 | ||
|
49 | from rhodecode.lib.vcs import VCSCommunicationError | |
|
50 | from rhodecode.lib.exceptions import VCSServerUnavailable | |
|
47 | 51 | from rhodecode.lib.middleware import csrf |
|
48 | 52 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
49 | 53 | from rhodecode.lib.middleware.error_handling import ( |
|
50 | 54 | PylonsErrorHandlingMiddleware) |
|
51 | 55 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
52 | 56 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
53 | 57 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
54 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist | |
|
58 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict | |
|
55 | 59 | from rhodecode.subscribers import ( |
|
56 | 60 | scan_repositories_if_enabled, write_js_routes_if_enabled, |
|
57 | 61 | write_metadata_if_needed) |
|
58 | 62 | |
|
59 | 63 | |
|
60 | 64 | log = logging.getLogger(__name__) |
|
61 | 65 | |
|
62 | 66 | |
|
63 | 67 | # this is used to avoid the route lookup overhead in routesmiddleware |
|
64 | 68 | # for certain routes which won't go to pylons - e.g. static files, debugger |
|
65 | 69 | # it is only needed for the pylons migration and can be removed once complete |
|
66 | 70 | class SkippableRoutesMiddleware(RoutesMiddleware): |
|
67 | 71 | """ Routes middleware that allows you to skip prefixes """ |
|
68 | 72 | |
|
69 | 73 | def __init__(self, *args, **kw): |
|
70 | 74 | self.skip_prefixes = kw.pop('skip_prefixes', []) |
|
71 | 75 | super(SkippableRoutesMiddleware, self).__init__(*args, **kw) |
|
72 | 76 | |
|
73 | 77 | def __call__(self, environ, start_response): |
|
74 | 78 | for prefix in self.skip_prefixes: |
|
75 | 79 | if environ['PATH_INFO'].startswith(prefix): |
|
76 | 80 | # added to avoid the case when a missing /_static route falls |
|
77 | 81 | # through to pylons and causes an exception as pylons is |
|
78 | 82 | # expecting wsgiorg.routing_args to be set in the environ |
|
79 | 83 | # by RoutesMiddleware. |
|
80 | 84 | if 'wsgiorg.routing_args' not in environ: |
|
81 | 85 | environ['wsgiorg.routing_args'] = (None, {}) |
|
82 | 86 | return self.app(environ, start_response) |
|
83 | 87 | |
|
84 | 88 | return super(SkippableRoutesMiddleware, self).__call__( |
|
85 | 89 | environ, start_response) |
|
86 | 90 | |
|
87 | 91 | |
|
88 | 92 | def make_app(global_conf, static_files=True, **app_conf): |
|
89 | 93 | """Create a Pylons WSGI application and return it |
|
90 | 94 | |
|
91 | 95 | ``global_conf`` |
|
92 | 96 | The inherited configuration for this application. Normally from |
|
93 | 97 | the [DEFAULT] section of the Paste ini file. |
|
94 | 98 | |
|
95 | 99 | ``app_conf`` |
|
96 | 100 | The application's local configuration. Normally specified in |
|
97 | 101 | the [app:<name>] section of the Paste ini file (where <name> |
|
98 | 102 | defaults to main). |
|
99 | 103 | |
|
100 | 104 | """ |
|
101 | 105 | # Apply compatibility patches |
|
102 | 106 | patches.kombu_1_5_1_python_2_7_11() |
|
103 | 107 | patches.inspect_getargspec() |
|
104 | 108 | |
|
105 | 109 | # Configure the Pylons environment |
|
106 | 110 | config = load_environment(global_conf, app_conf) |
|
107 | 111 | |
|
108 | 112 | # The Pylons WSGI app |
|
109 | 113 | app = PylonsApp(config=config) |
|
110 | 114 | if rhodecode.is_test: |
|
111 | 115 | app = csrf.CSRFDetector(app) |
|
112 | 116 | |
|
113 | 117 | expected_origin = config.get('expected_origin') |
|
114 | 118 | if expected_origin: |
|
115 | 119 | # The API can be accessed from other Origins. |
|
116 | 120 | app = csrf.OriginChecker(app, expected_origin, |
|
117 | 121 | skip_urls=[routes.util.url_for('api')]) |
|
118 | 122 | |
|
119 | 123 | # Establish the Registry for this application |
|
120 | 124 | app = RegistryManager(app) |
|
121 | 125 | |
|
122 | 126 | app.config = config |
|
123 | 127 | |
|
124 | 128 | return app |
|
125 | 129 | |
|
126 | 130 | |
|
127 | 131 | def make_pyramid_app(global_config, **settings): |
|
128 | 132 | """ |
|
129 | 133 | Constructs the WSGI application based on Pyramid and wraps the Pylons based |
|
130 | 134 | application. |
|
131 | 135 | |
|
132 | 136 | Specials: |
|
133 | 137 | |
|
134 | 138 | * We migrate from Pylons to Pyramid. While doing this, we keep both |
|
135 | 139 | frameworks functional. This involves moving some WSGI middlewares around |
|
136 | 140 | and providing access to some data internals, so that the old code is |
|
137 | 141 | still functional. |
|
138 | 142 | |
|
139 | 143 | * The application can also be integrated like a plugin via the call to |
|
140 | 144 | `includeme`. This is accompanied with the other utility functions which |
|
141 | 145 | are called. Changing this should be done with great care to not break |
|
142 | 146 | cases when these fragments are assembled from another place. |
|
143 | 147 | |
|
144 | 148 | """ |
|
145 | 149 | # The edition string should be available in pylons too, so we add it here |
|
146 | 150 | # before copying the settings. |
|
147 | 151 | settings.setdefault('rhodecode.edition', 'Community Edition') |
|
148 | 152 | |
|
149 | 153 | # As long as our Pylons application does expect "unprepared" settings, make |
|
150 | 154 | # sure that we keep an unmodified copy. This avoids unintentional change of |
|
151 | 155 | # behavior in the old application. |
|
152 | 156 | settings_pylons = settings.copy() |
|
153 | 157 | |
|
154 | 158 | sanitize_settings_and_apply_defaults(settings) |
|
155 | 159 | config = Configurator(settings=settings) |
|
156 | 160 | add_pylons_compat_data(config.registry, global_config, settings_pylons) |
|
157 | 161 | |
|
158 | 162 | load_pyramid_environment(global_config, settings) |
|
159 | 163 | |
|
160 | 164 | includeme_first(config) |
|
161 | 165 | includeme(config) |
|
162 | 166 | pyramid_app = config.make_wsgi_app() |
|
163 | 167 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
164 | 168 | pyramid_app.config = config |
|
165 | 169 | |
|
166 | 170 | # creating the app uses a connection - return it after we are done |
|
167 | 171 | meta.Session.remove() |
|
168 | 172 | |
|
169 | 173 | return pyramid_app |
|
170 | 174 | |
|
171 | 175 | |
|
172 | 176 | def make_not_found_view(config): |
|
173 | 177 | """ |
|
174 | 178 | This creates the view which should be registered as not-found-view to |
|
175 | 179 | pyramid. Basically it consists of the old pylons app, converted to a view. |
|
176 | 180 | Additionally it is wrapped by some other middlewares. |
|
177 | 181 | """ |
|
178 | 182 | settings = config.registry.settings |
|
179 | 183 | vcs_server_enabled = settings['vcs.server.enable'] |
|
180 | 184 | |
|
181 | 185 | # Make pylons app from unprepared settings. |
|
182 | 186 | pylons_app = make_app( |
|
183 | 187 | config.registry._pylons_compat_global_config, |
|
184 | 188 | **config.registry._pylons_compat_settings) |
|
185 | 189 | config.registry._pylons_compat_config = pylons_app.config |
|
186 | 190 | |
|
187 | 191 | # Appenlight monitoring. |
|
188 | 192 | pylons_app, appenlight_client = wrap_in_appenlight_if_enabled( |
|
189 | 193 | pylons_app, settings) |
|
190 | 194 | |
|
191 | 195 | # The pylons app is executed inside of the pyramid 404 exception handler. |
|
192 | 196 | # Exceptions which are raised inside of it are not handled by pyramid |
|
193 | 197 | # again. Therefore we add a middleware that invokes the error handler in |
|
194 | 198 | # case of an exception or error response. This way we return proper error |
|
195 | 199 | # HTML pages in case of an error. |
|
196 | 200 | reraise = (settings.get('debugtoolbar.enabled', False) or |
|
197 | 201 | rhodecode.disable_error_handler) |
|
198 | 202 | pylons_app = PylonsErrorHandlingMiddleware( |
|
199 | 203 | pylons_app, error_handler, reraise) |
|
200 | 204 | |
|
201 | 205 | # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a |
|
202 | 206 | # view to handle the request. Therefore it is wrapped around the pylons |
|
203 | 207 | # app. It has to be outside of the error handling otherwise error responses |
|
204 | 208 | # from the vcsserver are converted to HTML error pages. This confuses the |
|
205 | 209 | # command line tools and the user won't get a meaningful error message. |
|
206 | 210 | if vcs_server_enabled: |
|
207 | 211 | pylons_app = VCSMiddleware( |
|
208 | 212 | pylons_app, settings, appenlight_client, registry=config.registry) |
|
209 | 213 | |
|
210 | 214 | # Convert WSGI app to pyramid view and return it. |
|
211 | 215 | return wsgiapp(pylons_app) |
|
212 | 216 | |
|
213 | 217 | |
|
214 | 218 | def add_pylons_compat_data(registry, global_config, settings): |
|
215 | 219 | """ |
|
216 | 220 | Attach data to the registry to support the Pylons integration. |
|
217 | 221 | """ |
|
218 | 222 | registry._pylons_compat_global_config = global_config |
|
219 | 223 | registry._pylons_compat_settings = settings |
|
220 | 224 | |
|
221 | 225 | |
|
222 | 226 | def error_handler(exception, request): |
|
223 | 227 | import rhodecode |
|
224 | from rhodecode.lib.utils2 import AttributeDict | |
|
225 | 228 | from rhodecode.lib import helpers |
|
226 | 229 | |
|
227 | 230 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
228 | 231 | |
|
229 | 232 | base_response = HTTPInternalServerError() |
|
230 | 233 | # prefer original exception for the response since it may have headers set |
|
231 | 234 | if isinstance(exception, HTTPException): |
|
232 | 235 | base_response = exception |
|
236 | elif isinstance(exception, VCSCommunicationError): | |
|
237 | base_response = VCSServerUnavailable() | |
|
233 | 238 | |
|
234 | 239 | def is_http_error(response): |
|
235 | 240 | # error which should have traceback |
|
236 | 241 | return response.status_code > 499 |
|
237 | 242 | |
|
238 | 243 | if is_http_error(base_response): |
|
239 | 244 | log.exception( |
|
240 | 245 | 'error occurred handling this request for path: %s', request.path) |
|
241 | 246 | |
|
242 | 247 | c = AttributeDict() |
|
243 | 248 | c.error_message = base_response.status |
|
244 | 249 | c.error_explanation = base_response.explanation or str(base_response) |
|
245 | 250 | c.visual = AttributeDict() |
|
246 | 251 | |
|
247 | 252 | c.visual.rhodecode_support_url = ( |
|
248 | 253 | request.registry.settings.get('rhodecode_support_url') or |
|
249 | 254 | request.route_url('rhodecode_support') |
|
250 | 255 | ) |
|
251 | 256 | c.redirect_time = 0 |
|
252 | 257 | c.rhodecode_name = rhodecode_title |
|
253 | 258 | if not c.rhodecode_name: |
|
254 | 259 | c.rhodecode_name = 'Rhodecode' |
|
255 | 260 | |
|
256 | 261 | c.causes = [] |
|
257 | 262 | if hasattr(base_response, 'causes'): |
|
258 | 263 | c.causes = base_response.causes |
|
259 | 264 | c.messages = helpers.flash.pop_messages() |
|
265 | ||
|
260 | 266 | response = render_to_response( |
|
261 | 267 | '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request, |
|
262 | 268 | response=base_response) |
|
263 | 269 | |
|
264 | 270 | return response |
|
265 | 271 | |
|
266 | 272 | |
|
267 | 273 | def includeme(config): |
|
268 | 274 | settings = config.registry.settings |
|
269 | 275 | |
|
270 | 276 | # plugin information |
|
271 | 277 | config.registry.rhodecode_plugins = OrderedDict() |
|
272 | 278 | |
|
273 | 279 | config.add_directive( |
|
274 | 280 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
275 | 281 | |
|
276 | 282 | if asbool(settings.get('appenlight', 'false')): |
|
277 | 283 | config.include('appenlight_client.ext.pyramid_tween') |
|
278 | 284 | |
|
279 | 285 | # Includes which are required. The application would fail without them. |
|
280 | 286 | config.include('pyramid_mako') |
|
281 | 287 | config.include('pyramid_beaker') |
|
282 | 288 | |
|
283 | 289 | config.include('rhodecode.authentication') |
|
284 | 290 | config.include('rhodecode.integrations') |
|
285 | 291 | |
|
286 | 292 | # apps |
|
287 | 293 | config.include('rhodecode.apps._base') |
|
288 | 294 | config.include('rhodecode.apps.ops') |
|
289 | 295 | |
|
290 | 296 | config.include('rhodecode.apps.admin') |
|
291 | 297 | config.include('rhodecode.apps.channelstream') |
|
292 | 298 | config.include('rhodecode.apps.login') |
|
293 | 299 | config.include('rhodecode.apps.home') |
|
294 | 300 | config.include('rhodecode.apps.repository') |
|
295 | 301 | config.include('rhodecode.apps.repo_group') |
|
296 | 302 | config.include('rhodecode.apps.search') |
|
297 | 303 | config.include('rhodecode.apps.user_profile') |
|
298 | 304 | config.include('rhodecode.apps.my_account') |
|
299 | 305 | config.include('rhodecode.apps.svn_support') |
|
300 | 306 | |
|
301 | 307 | config.include('rhodecode.tweens') |
|
302 | 308 | config.include('rhodecode.api') |
|
303 | 309 | |
|
304 | 310 | config.add_route( |
|
305 | 311 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
306 | 312 | |
|
307 | 313 | config.add_translation_dirs('rhodecode:i18n/') |
|
308 | 314 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
309 | 315 | |
|
310 | 316 | # Add subscribers. |
|
311 | 317 | config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated) |
|
312 | 318 | config.add_subscriber(write_metadata_if_needed, ApplicationCreated) |
|
313 | 319 | config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated) |
|
314 | 320 | |
|
315 | 321 | # Set the authorization policy. |
|
316 | 322 | authz_policy = ACLAuthorizationPolicy() |
|
317 | 323 | config.set_authorization_policy(authz_policy) |
|
318 | 324 | |
|
319 | 325 | # Set the default renderer for HTML templates to mako. |
|
320 | 326 | config.add_mako_renderer('.html') |
|
321 | 327 | |
|
322 | 328 | config.add_renderer( |
|
323 | 329 | name='json_ext', |
|
324 | 330 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
325 | 331 | |
|
326 | 332 | # include RhodeCode plugins |
|
327 | 333 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
328 | 334 | for inc in includes: |
|
329 | 335 | config.include(inc) |
|
330 | 336 | |
|
331 | 337 | # This is the glue which allows us to migrate in chunks. By registering the |
|
332 | 338 | # pylons based application as the "Not Found" view in Pyramid, we will |
|
333 | 339 | # fallback to the old application each time the new one does not yet know |
|
334 | 340 | # how to handle a request. |
|
335 | 341 | config.add_notfound_view(make_not_found_view(config)) |
|
336 | 342 | |
|
337 | 343 | if not settings.get('debugtoolbar.enabled', False): |
|
338 | 344 | # if no toolbar, then any exception gets caught and rendered |
|
339 | 345 | config.add_view(error_handler, context=Exception) |
|
340 | 346 | |
|
341 | 347 | config.add_view(error_handler, context=HTTPError) |
|
342 | 348 | |
|
343 | 349 | |
|
344 | 350 | def includeme_first(config): |
|
345 | 351 | # redirect automatic browser favicon.ico requests to correct place |
|
346 | 352 | def favicon_redirect(context, request): |
|
347 | 353 | return HTTPFound( |
|
348 | 354 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
349 | 355 | |
|
350 | 356 | config.add_view(favicon_redirect, route_name='favicon') |
|
351 | 357 | config.add_route('favicon', '/favicon.ico') |
|
352 | 358 | |
|
353 | 359 | def robots_redirect(context, request): |
|
354 | 360 | return HTTPFound( |
|
355 | 361 | request.static_path('rhodecode:public/robots.txt')) |
|
356 | 362 | |
|
357 | 363 | config.add_view(robots_redirect, route_name='robots') |
|
358 | 364 | config.add_route('robots', '/robots.txt') |
|
359 | 365 | |
|
360 | 366 | config.add_static_view( |
|
361 | 367 | '_static/deform', 'deform:static') |
|
362 | 368 | config.add_static_view( |
|
363 | 369 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
364 | 370 | |
|
365 | 371 | |
|
366 | 372 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
367 | 373 | """ |
|
368 | 374 | Apply outer WSGI middlewares around the application. |
|
369 | 375 | |
|
370 | 376 | Part of this has been moved up from the Pylons layer, so that the |
|
371 | 377 | data is also available if old Pylons code is hit through an already ported |
|
372 | 378 | view. |
|
373 | 379 | """ |
|
374 | 380 | settings = config.registry.settings |
|
375 | 381 | |
|
376 | 382 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
377 | 383 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
378 | 384 | |
|
379 | 385 | # Add RoutesMiddleware to support the pylons compatibility tween during |
|
380 | 386 | # migration to pyramid. |
|
381 | 387 | pyramid_app = SkippableRoutesMiddleware( |
|
382 | 388 | pyramid_app, config.registry._pylons_compat_config['routes.map'], |
|
383 | 389 | skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar')) |
|
384 | 390 | |
|
385 | 391 | pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings) |
|
386 | 392 | |
|
387 | 393 | if settings['gzip_responses']: |
|
388 | 394 | pyramid_app = make_gzip_middleware( |
|
389 | 395 | pyramid_app, settings, compress_level=1) |
|
390 | 396 | |
|
391 | 397 | # this should be the outermost middleware in the wsgi stack since |
|
392 | 398 | # middleware like Routes make database calls |
|
393 | 399 | def pyramid_app_with_cleanup(environ, start_response): |
|
394 | 400 | try: |
|
395 | 401 | return pyramid_app(environ, start_response) |
|
396 | 402 | finally: |
|
397 | 403 | # Dispose current database session and rollback uncommitted |
|
398 | 404 | # transactions. |
|
399 | 405 | meta.Session.remove() |
|
400 | 406 | |
|
401 | 407 | # In a single-threaded server mode, on a non-sqlite db we should have |
|
402 | 408 | # '0 Current Checked out connections' at the end of a request, |
|
403 | 409 | # if not, then something, somewhere is leaving a connection open |
|
404 | 410 | pool = meta.Base.metadata.bind.engine.pool |
|
405 | 411 | log.debug('sa pool status: %s', pool.status()) |
|
406 | 412 | |
|
407 | ||
|
408 | 413 | return pyramid_app_with_cleanup |
|
409 | 414 | |
|
410 | 415 | |
|
411 | 416 | def sanitize_settings_and_apply_defaults(settings): |
|
412 | 417 | """ |
|
413 | 418 | Applies settings defaults and does all type conversion. |
|
414 | 419 | |
|
415 | 420 | We would move all settings parsing and preparation into this place, so that |
|
416 | 421 | we have only one place left which deals with this part. The remaining parts |
|
417 | 422 | of the application would start to rely fully on well prepared settings. |
|
418 | 423 | |
|
419 | 424 | This piece would later be split up per topic to avoid a big fat monster |
|
420 | 425 | function. |
|
421 | 426 | """ |
|
422 | 427 | |
|
423 | 428 | # Pyramid's mako renderer has to search in the templates folder so that the |
|
424 | 429 | # old templates still work. Ported and new templates are expected to use |
|
425 | 430 | # real asset specifications for the includes. |
|
426 | 431 | mako_directories = settings.setdefault('mako.directories', [ |
|
427 | 432 | # Base templates of the original Pylons application |
|
428 | 433 | 'rhodecode:templates', |
|
429 | 434 | ]) |
|
430 | 435 | log.debug( |
|
431 | 436 | "Using the following Mako template directories: %s", |
|
432 | 437 | mako_directories) |
|
433 | 438 | |
|
434 | 439 | # Default includes, possible to change as a user |
|
435 | 440 | pyramid_includes = settings.setdefault('pyramid.includes', [ |
|
436 | 441 | 'rhodecode.lib.middleware.request_wrapper', |
|
437 | 442 | ]) |
|
438 | 443 | log.debug( |
|
439 | 444 | "Using the following pyramid.includes: %s", |
|
440 | 445 | pyramid_includes) |
|
441 | 446 | |
|
442 | 447 | # TODO: johbo: Re-think this, usually the call to config.include |
|
443 | 448 | # should allow to pass in a prefix. |
|
444 | 449 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
445 | 450 | |
|
446 | 451 | # Sanitize generic settings. |
|
447 | 452 | _list_setting(settings, 'default_encoding', 'UTF-8') |
|
448 | 453 | _bool_setting(settings, 'is_test', 'false') |
|
449 | 454 | _bool_setting(settings, 'gzip_responses', 'false') |
|
450 | 455 | |
|
451 | 456 | # Call split out functions that sanitize settings for each topic. |
|
452 | 457 | _sanitize_appenlight_settings(settings) |
|
453 | 458 | _sanitize_vcs_settings(settings) |
|
454 | 459 | |
|
455 | 460 | return settings |
|
456 | 461 | |
|
457 | 462 | |
|
458 | 463 | def _sanitize_appenlight_settings(settings): |
|
459 | 464 | _bool_setting(settings, 'appenlight', 'false') |
|
460 | 465 | |
|
461 | 466 | |
|
462 | 467 | def _sanitize_vcs_settings(settings): |
|
463 | 468 | """ |
|
464 | 469 | Applies settings defaults and does type conversion for all VCS related |
|
465 | 470 | settings. |
|
466 | 471 | """ |
|
467 | 472 | _string_setting(settings, 'vcs.svn.compatible_version', '') |
|
468 | 473 | _string_setting(settings, 'git_rev_filter', '--all') |
|
469 | 474 | _string_setting(settings, 'vcs.hooks.protocol', 'http') |
|
470 | 475 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') |
|
471 | 476 | _string_setting(settings, 'vcs.server', '') |
|
472 | 477 | _string_setting(settings, 'vcs.server.log_level', 'debug') |
|
473 | 478 | _string_setting(settings, 'vcs.server.protocol', 'http') |
|
474 | 479 | _bool_setting(settings, 'startup.import_repos', 'false') |
|
475 | 480 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') |
|
476 | 481 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
477 | 482 | _bool_setting(settings, 'vcs.start_server', 'false') |
|
478 | 483 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') |
|
479 | 484 | _int_setting(settings, 'vcs.connection_timeout', 3600) |
|
480 | 485 | |
|
481 | 486 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
482 | 487 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' |
|
483 | 488 | # which is now mapped to 'http'. |
|
484 | 489 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
485 | 490 | if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http': |
|
486 | 491 | settings['vcs.scm_app_implementation'] = 'http' |
|
487 | 492 | |
|
488 | 493 | |
|
489 | 494 | def _int_setting(settings, name, default): |
|
490 | 495 | settings[name] = int(settings.get(name, default)) |
|
491 | 496 | |
|
492 | 497 | |
|
493 | 498 | def _bool_setting(settings, name, default): |
|
494 | 499 | input = settings.get(name, default) |
|
495 | 500 | if isinstance(input, unicode): |
|
496 | 501 | input = input.encode('utf8') |
|
497 | 502 | settings[name] = asbool(input) |
|
498 | 503 | |
|
499 | 504 | |
|
500 | 505 | def _list_setting(settings, name, default): |
|
501 | 506 | raw_value = settings.get(name, default) |
|
502 | 507 | |
|
503 | 508 | old_separator = ',' |
|
504 | 509 | if old_separator in raw_value: |
|
505 | 510 | # If we get a comma separated list, pass it to our own function. |
|
506 | 511 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) |
|
507 | 512 | else: |
|
508 | 513 | # Otherwise we assume it uses pyramids space/newline separation. |
|
509 | 514 | settings[name] = aslist(raw_value) |
|
510 | 515 | |
|
511 | 516 | |
|
512 | 517 | def _string_setting(settings, name, default, lower=True): |
|
513 | 518 | value = settings.get(name, default) |
|
514 | 519 | if lower: |
|
515 | 520 | value = value.lower() |
|
516 | 521 | settings[name] = value |
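
The _bool_setting / _list_setting / _string_setting helpers at the end of this file normalize raw .ini values before the rest of the application reads them. A small self-contained sketch of the effect, assuming only pyramid is installed; it inlines the comma vs. whitespace handling instead of importing the RhodeCode helpers:

from pyramid.settings import asbool, aslist

settings = {'vcs.backends': 'hg, git, svn', 'gzip_responses': 'false'}

# comma separated lists are split on ',', otherwise pyramid's
# whitespace/newline based aslist() applies (mirrors _list_setting)
raw = settings['vcs.backends']
settings['vcs.backends'] = (
    [chunk.strip() for chunk in raw.split(',')] if ',' in raw else aslist(raw))

# boolean flags are converted with pyramid's asbool() (mirrors _bool_setting)
settings['gzip_responses'] = asbool(settings['gzip_responses'])

assert settings['vcs.backends'] == ['hg', 'git', 'svn']
assert settings['gzip_responses'] is False
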
@@ -1,1017 +1,982 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Routes configuration |
|
23 | 23 | |
|
24 | 24 | The more specific and detailed routes should be defined first so they |
|
25 | 25 | may take precedence over the more generic routes. For more information |
|
26 | 26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
27 | 27 | |
|
28 | 28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
29 | 29 | and the _route_name variable, which uses some of the naming stored here to do redirects. |
|
30 | 30 | """ |
|
31 | 31 | import os |
|
32 | 32 | import re |
|
33 | 33 | from routes import Mapper |
|
34 | 34 | |
|
35 | 35 | # prefix for non repository related links needs to be prefixed with `/` |
|
36 | 36 | ADMIN_PREFIX = '/_admin' |
|
37 | 37 | STATIC_FILE_PREFIX = '/_static' |
|
38 | 38 | |
|
39 | 39 | # Default requirements for URL parts |
|
40 | 40 | URL_NAME_REQUIREMENTS = { |
|
41 | 41 | # group name can have a slash in them, but they must not end with a slash |
|
42 | 42 | 'group_name': r'.*?[^/]', |
|
43 | 43 | 'repo_group_name': r'.*?[^/]', |
|
44 | 44 | # repo names can have a slash in them, but they must not end with a slash |
|
45 | 45 | 'repo_name': r'.*?[^/]', |
|
46 | 46 | # file path eats up everything at the end |
|
47 | 47 | 'f_path': r'.*', |
|
48 | 48 | # reference types |
|
49 | 49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
50 | 50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def add_route_requirements(route_path, requirements): |
|
55 | 55 | """ |
|
56 | 56 | Adds regex requirements to pyramid routes using a mapping dict |
|
57 | 57 | |
|
58 | 58 | >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'}) |
|
59 | 59 | '/{action}/{id:\d+}' |
|
60 | 60 | |
|
61 | 61 | """ |
|
62 | 62 | for key, regex in requirements.items(): |
|
63 | 63 | route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex)) |
|
64 | 64 | return route_path |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class JSRoutesMapper(Mapper): |
|
68 | 68 | """ |
|
69 | 69 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
70 | 70 | """ |
|
71 | 71 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
72 | 72 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
73 | 73 | def __init__(self, *args, **kw): |
|
74 | 74 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
75 | 75 | self._jsroutes = [] |
|
76 | 76 | |
|
77 | 77 | def connect(self, *args, **kw): |
|
78 | 78 | """ |
|
79 | 79 | Wrapper for connect to take an extra argument jsroute=True |
|
80 | 80 | |
|
81 | 81 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
82 | 82 | """ |
|
83 | 83 | if kw.pop('jsroute', False): |
|
84 | 84 | if not self._named_route_regex.match(args[0]): |
|
85 | 85 | raise Exception('only named routes can be added to pyroutes') |
|
86 | 86 | self._jsroutes.append(args[0]) |
|
87 | 87 | |
|
88 | 88 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
89 | 89 | |
|
90 | 90 | def _extract_route_information(self, route): |
|
91 | 91 | """ |
|
92 | 92 | Convert a route into tuple(name, path, args), eg: |
|
93 | 93 | ('show_user', '/profile/%(username)s', ['username']) |
|
94 | 94 | """ |
|
95 | 95 | routepath = route.routepath |
|
96 | 96 | def replace(matchobj): |
|
97 | 97 | if matchobj.group(1): |
|
98 | 98 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
99 | 99 | else: |
|
100 | 100 | return "%%(%s)s" % matchobj.group(2) |
|
101 | 101 | |
|
102 | 102 | routepath = self._argument_prog.sub(replace, routepath) |
|
103 | 103 | return ( |
|
104 | 104 | route.name, |
|
105 | 105 | routepath, |
|
106 | 106 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
107 | 107 | for arg in self._argument_prog.findall(route.routepath)] |
|
108 | 108 | ) |
|
109 | 109 | |
|
110 | 110 | def jsroutes(self): |
|
111 | 111 | """ |
|
112 | 112 | Return a list of pyroutes.js compatible routes |
|
113 | 113 | """ |
|
114 | 114 | for route_name in self._jsroutes: |
|
115 | 115 | yield self._extract_route_information(self._routenames[route_name]) |
|
116 | 116 | |
|
117 | 117 | |
|
118 | 118 | def make_map(config): |
|
119 | 119 | """Create, configure and return the routes Mapper""" |
|
120 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], | |
|
121 | always_scan=config['debug']) | |
|
120 | rmap = JSRoutesMapper( | |
|
121 | directory=config['pylons.paths']['controllers'], | |
|
122 | always_scan=config['debug']) | |
|
122 | 123 | rmap.minimization = False |
|
123 | 124 | rmap.explicit = False |
|
124 | 125 | |
|
125 | 126 | from rhodecode.lib.utils2 import str2bool |
|
126 | 127 | from rhodecode.model import repo, repo_group |
|
127 | 128 | |
|
128 | 129 | def check_repo(environ, match_dict): |
|
129 | 130 | """ |
|
130 | 131 | check for valid repository for proper 404 handling |
|
131 | 132 | |
|
132 | 133 | :param environ: |
|
133 | 134 | :param match_dict: |
|
134 | 135 | """ |
|
135 | 136 | repo_name = match_dict.get('repo_name') |
|
136 | 137 | |
|
137 | 138 | if match_dict.get('f_path'): |
|
138 | 139 | # fix for multiple initial slashes that causes errors |
|
139 | 140 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
140 | 141 | repo_model = repo.RepoModel() |
|
141 | 142 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
142 | 143 | # if we match quickly from database, short circuit the operation, |
|
143 | 144 | # and validate repo based on the type. |
|
144 | 145 | if by_name_match: |
|
145 | 146 | return True |
|
146 | 147 | |
|
147 | 148 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
148 | 149 | if by_id_match: |
|
149 | 150 | repo_name = by_id_match.repo_name |
|
150 | 151 | match_dict['repo_name'] = repo_name |
|
151 | 152 | return True |
|
152 | 153 | |
|
153 | 154 | return False |
|
154 | 155 | |
|
155 | 156 | def check_group(environ, match_dict): |
|
156 | 157 | """ |
|
157 | 158 | check for valid repository group path for proper 404 handling |
|
158 | 159 | |
|
159 | 160 | :param environ: |
|
160 | 161 | :param match_dict: |
|
161 | 162 | """ |
|
162 | 163 | repo_group_name = match_dict.get('group_name') |
|
163 | 164 | repo_group_model = repo_group.RepoGroupModel() |
|
164 | 165 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
165 | 166 | if by_name_match: |
|
166 | 167 | return True |
|
167 | 168 | |
|
168 | 169 | return False |
|
169 | 170 | |
|
170 | 171 | def check_user_group(environ, match_dict): |
|
171 | 172 | """ |
|
172 | 173 | check for valid user group for proper 404 handling |
|
173 | 174 | |
|
174 | 175 | :param environ: |
|
175 | 176 | :param match_dict: |
|
176 | 177 | """ |
|
177 | 178 | return True |
|
178 | 179 | |
|
179 | 180 | def check_int(environ, match_dict): |
|
180 | 181 | return match_dict.get('id').isdigit() |
|
181 | 182 | |
|
182 | 183 | |
|
183 | 184 | #========================================================================== |
|
184 | 185 | # CUSTOM ROUTES HERE |
|
185 | 186 | #========================================================================== |
|
186 | 187 | |
|
187 | 188 | # ping and pylons error test |
|
188 | 189 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
189 | 190 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
190 | 191 | |
|
191 | 192 | # ADMIN REPOSITORY ROUTES |
|
192 | 193 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
193 | 194 | controller='admin/repos') as m: |
|
194 | 195 | m.connect('repos', '/repos', |
|
195 | 196 | action='create', conditions={'method': ['POST']}) |
|
196 | 197 | m.connect('repos', '/repos', |
|
197 | 198 | action='index', conditions={'method': ['GET']}) |
|
198 | 199 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
199 | 200 | action='create_repository', conditions={'method': ['GET']}) |
|
200 | 201 | m.connect('delete_repo', '/repos/{repo_name}', |
|
201 | 202 | action='delete', conditions={'method': ['DELETE']}, |
|
202 | 203 | requirements=URL_NAME_REQUIREMENTS) |
|
203 | 204 | m.connect('repo', '/repos/{repo_name}', |
|
204 | 205 | action='show', conditions={'method': ['GET'], |
|
205 | 206 | 'function': check_repo}, |
|
206 | 207 | requirements=URL_NAME_REQUIREMENTS) |
|
207 | 208 | |
|
208 | 209 | # ADMIN REPOSITORY GROUPS ROUTES |
|
209 | 210 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
210 | 211 | controller='admin/repo_groups') as m: |
|
211 | 212 | m.connect('repo_groups', '/repo_groups', |
|
212 | 213 | action='create', conditions={'method': ['POST']}) |
|
213 | 214 | m.connect('repo_groups', '/repo_groups', |
|
214 | 215 | action='index', conditions={'method': ['GET']}) |
|
215 | 216 | m.connect('new_repo_group', '/repo_groups/new', |
|
216 | 217 | action='new', conditions={'method': ['GET']}) |
|
217 | 218 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
218 | 219 | action='update', conditions={'method': ['PUT'], |
|
219 | 220 | 'function': check_group}, |
|
220 | 221 | requirements=URL_NAME_REQUIREMENTS) |
|
221 | 222 | |
|
222 | 223 | # EXTRAS REPO GROUP ROUTES |
|
223 | 224 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
224 | 225 | action='edit', |
|
225 | 226 | conditions={'method': ['GET'], 'function': check_group}, |
|
226 | 227 | requirements=URL_NAME_REQUIREMENTS) |
|
227 | 228 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
228 | 229 | action='edit', |
|
229 | 230 | conditions={'method': ['PUT'], 'function': check_group}, |
|
230 | 231 | requirements=URL_NAME_REQUIREMENTS) |
|
231 | 232 | |
|
232 | 233 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
233 | 234 | action='edit_repo_group_advanced', |
|
234 | 235 | conditions={'method': ['GET'], 'function': check_group}, |
|
235 | 236 | requirements=URL_NAME_REQUIREMENTS) |
|
236 | 237 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
237 | 238 | action='edit_repo_group_advanced', |
|
238 | 239 | conditions={'method': ['PUT'], 'function': check_group}, |
|
239 | 240 | requirements=URL_NAME_REQUIREMENTS) |
|
240 | 241 | |
|
241 | 242 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
242 | 243 | action='edit_repo_group_perms', |
|
243 | 244 | conditions={'method': ['GET'], 'function': check_group}, |
|
244 | 245 | requirements=URL_NAME_REQUIREMENTS) |
|
245 | 246 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
246 | 247 | action='update_perms', |
|
247 | 248 | conditions={'method': ['PUT'], 'function': check_group}, |
|
248 | 249 | requirements=URL_NAME_REQUIREMENTS) |
|
249 | 250 | |
|
250 | 251 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
251 | 252 | action='delete', conditions={'method': ['DELETE'], |
|
252 | 253 | 'function': check_group}, |
|
253 | 254 | requirements=URL_NAME_REQUIREMENTS) |
|
254 | 255 | |
|
255 | 256 | # ADMIN USER ROUTES |
|
256 | 257 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
257 | 258 | controller='admin/users') as m: |
|
258 | 259 | m.connect('users', '/users', |
|
259 | 260 | action='create', conditions={'method': ['POST']}) |
|
260 | 261 | m.connect('new_user', '/users/new', |
|
261 | 262 | action='new', conditions={'method': ['GET']}) |
|
262 | 263 | m.connect('update_user', '/users/{user_id}', |
|
263 | 264 | action='update', conditions={'method': ['PUT']}) |
|
264 | 265 | m.connect('delete_user', '/users/{user_id}', |
|
265 | 266 | action='delete', conditions={'method': ['DELETE']}) |
|
266 | 267 | m.connect('edit_user', '/users/{user_id}/edit', |
|
267 | 268 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
268 | 269 | m.connect('user', '/users/{user_id}', |
|
269 | 270 | action='show', conditions={'method': ['GET']}) |
|
270 | 271 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
271 | 272 | action='reset_password', conditions={'method': ['POST']}) |
|
272 | 273 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
273 | 274 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
274 | 275 | |
|
275 | 276 | # EXTRAS USER ROUTES |
|
276 | 277 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
277 | 278 | action='edit_advanced', conditions={'method': ['GET']}) |
|
278 | 279 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
279 | 280 | action='update_advanced', conditions={'method': ['PUT']}) |
|
280 | 281 | |
|
281 | 282 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
282 | 283 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
283 | 284 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
284 | 285 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
285 | 286 | |
|
286 | 287 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
287 | 288 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
288 | 289 | |
|
289 | 290 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
290 | 291 | action='edit_emails', conditions={'method': ['GET']}) |
|
291 | 292 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
292 | 293 | action='add_email', conditions={'method': ['PUT']}) |
|
293 | 294 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
294 | 295 | action='delete_email', conditions={'method': ['DELETE']}) |
|
295 | 296 | |
|
296 | 297 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
297 | 298 | action='edit_ips', conditions={'method': ['GET']}) |
|
298 | 299 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
299 | 300 | action='add_ip', conditions={'method': ['PUT']}) |
|
300 | 301 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
301 | 302 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
302 | 303 | |
|
303 | 304 | # ADMIN USER GROUPS REST ROUTES |
|
304 | 305 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
305 | 306 | controller='admin/user_groups') as m: |
|
306 | 307 | m.connect('users_groups', '/user_groups', |
|
307 | 308 | action='create', conditions={'method': ['POST']}) |
|
308 | 309 | m.connect('users_groups', '/user_groups', |
|
309 | 310 | action='index', conditions={'method': ['GET']}) |
|
310 | 311 | m.connect('new_users_group', '/user_groups/new', |
|
311 | 312 | action='new', conditions={'method': ['GET']}) |
|
312 | 313 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
313 | 314 | action='update', conditions={'method': ['PUT']}) |
|
314 | 315 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
315 | 316 | action='delete', conditions={'method': ['DELETE']}) |
|
316 | 317 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
317 | 318 | action='edit', conditions={'method': ['GET']}, |
|
318 | 319 | function=check_user_group) |
|
319 | 320 | |
|
320 | 321 | # EXTRAS USER GROUP ROUTES |
|
321 | 322 | m.connect('edit_user_group_global_perms', |
|
322 | 323 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
323 | 324 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
324 | 325 | m.connect('edit_user_group_global_perms', |
|
325 | 326 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
326 | 327 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
327 | 328 | m.connect('edit_user_group_perms_summary', |
|
328 | 329 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
329 | 330 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
330 | 331 | |
|
331 | 332 | m.connect('edit_user_group_perms', |
|
332 | 333 | '/user_groups/{user_group_id}/edit/permissions', |
|
333 | 334 | action='edit_perms', conditions={'method': ['GET']}) |
|
334 | 335 | m.connect('edit_user_group_perms', |
|
335 | 336 | '/user_groups/{user_group_id}/edit/permissions', |
|
336 | 337 | action='update_perms', conditions={'method': ['PUT']}) |
|
337 | 338 | |
|
338 | 339 | m.connect('edit_user_group_advanced', |
|
339 | 340 | '/user_groups/{user_group_id}/edit/advanced', |
|
340 | 341 | action='edit_advanced', conditions={'method': ['GET']}) |
|
341 | 342 | |
|
342 | 343 | m.connect('edit_user_group_advanced_sync', |
|
343 | 344 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
344 | 345 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
345 | 346 | |
|
346 | 347 | m.connect('edit_user_group_members', |
|
347 | 348 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
348 | 349 | action='user_group_members', conditions={'method': ['GET']}) |
|
349 | 350 | |
|
350 | 351 | # ADMIN PERMISSIONS ROUTES |
|
351 | 352 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
352 | 353 | controller='admin/permissions') as m: |
|
353 | 354 | m.connect('admin_permissions_application', '/permissions/application', |
|
354 | 355 | action='permission_application_update', conditions={'method': ['POST']}) |
|
355 | 356 | m.connect('admin_permissions_application', '/permissions/application', |
|
356 | 357 | action='permission_application', conditions={'method': ['GET']}) |
|
357 | 358 | |
|
358 | 359 | m.connect('admin_permissions_global', '/permissions/global', |
|
359 | 360 | action='permission_global_update', conditions={'method': ['POST']}) |
|
360 | 361 | m.connect('admin_permissions_global', '/permissions/global', |
|
361 | 362 | action='permission_global', conditions={'method': ['GET']}) |
|
362 | 363 | |
|
363 | 364 | m.connect('admin_permissions_object', '/permissions/object', |
|
364 | 365 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
365 | 366 | m.connect('admin_permissions_object', '/permissions/object', |
|
366 | 367 | action='permission_objects', conditions={'method': ['GET']}) |
|
367 | 368 | |
|
368 | 369 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
369 | 370 | action='permission_ips', conditions={'method': ['POST']}) |
|
370 | 371 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
371 | 372 | action='permission_ips', conditions={'method': ['GET']}) |
|
372 | 373 | |
|
373 | 374 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
374 | 375 | action='permission_perms', conditions={'method': ['GET']}) |
|
375 | 376 | |
|
376 | 377 | # ADMIN DEFAULTS REST ROUTES |
|
377 | 378 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
378 | 379 | controller='admin/defaults') as m: |
|
379 | 380 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
380 | 381 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
381 | 382 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
382 | 383 | action='index', conditions={'method': ['GET']}) |
|
383 | 384 | |
|
384 | 385 | # ADMIN DEBUG STYLE ROUTES |
|
385 | 386 | if str2bool(config.get('debug_style')): |
|
386 | 387 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
387 | 388 | controller='debug_style') as m: |
|
388 | 389 | m.connect('debug_style_home', '', |
|
389 | 390 | action='index', conditions={'method': ['GET']}) |
|
390 | 391 | m.connect('debug_style_template', '/t/{t_path}', |
|
391 | 392 | action='template', conditions={'method': ['GET']}) |
|
392 | 393 | |
|
393 | 394 | # ADMIN SETTINGS ROUTES |
|
394 | 395 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
395 | 396 | controller='admin/settings') as m: |
|
396 | 397 | |
|
397 | 398 | # default |
|
398 | 399 | m.connect('admin_settings', '/settings', |
|
399 | 400 | action='settings_global_update', |
|
400 | 401 | conditions={'method': ['POST']}) |
|
401 | 402 | m.connect('admin_settings', '/settings', |
|
402 | 403 | action='settings_global', conditions={'method': ['GET']}) |
|
403 | 404 | |
|
404 | 405 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
405 | 406 | action='settings_vcs_update', |
|
406 | 407 | conditions={'method': ['POST']}) |
|
407 | 408 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
408 | 409 | action='settings_vcs', |
|
409 | 410 | conditions={'method': ['GET']}) |
|
410 | 411 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
411 | 412 | action='delete_svn_pattern', |
|
412 | 413 | conditions={'method': ['DELETE']}) |
|
413 | 414 | |
|
414 | 415 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
415 | 416 | action='settings_mapping_update', |
|
416 | 417 | conditions={'method': ['POST']}) |
|
417 | 418 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
418 | 419 | action='settings_mapping', conditions={'method': ['GET']}) |
|
419 | 420 | |
|
420 | 421 | m.connect('admin_settings_global', '/settings/global', |
|
421 | 422 | action='settings_global_update', |
|
422 | 423 | conditions={'method': ['POST']}) |
|
423 | 424 | m.connect('admin_settings_global', '/settings/global', |
|
424 | 425 | action='settings_global', conditions={'method': ['GET']}) |
|
425 | 426 | |
|
426 | 427 | m.connect('admin_settings_visual', '/settings/visual', |
|
427 | 428 | action='settings_visual_update', |
|
428 | 429 | conditions={'method': ['POST']}) |
|
429 | 430 | m.connect('admin_settings_visual', '/settings/visual', |
|
430 | 431 | action='settings_visual', conditions={'method': ['GET']}) |
|
431 | 432 | |
|
432 | 433 | m.connect('admin_settings_issuetracker', |
|
433 | 434 | '/settings/issue-tracker', action='settings_issuetracker', |
|
434 | 435 | conditions={'method': ['GET']}) |
|
435 | 436 | m.connect('admin_settings_issuetracker_save', |
|
436 | 437 | '/settings/issue-tracker/save', |
|
437 | 438 | action='settings_issuetracker_save', |
|
438 | 439 | conditions={'method': ['POST']}) |
|
439 | 440 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
440 | 441 | action='settings_issuetracker_test', |
|
441 | 442 | conditions={'method': ['POST']}) |
|
442 | 443 | m.connect('admin_issuetracker_delete', |
|
443 | 444 | '/settings/issue-tracker/delete', |
|
444 | 445 | action='settings_issuetracker_delete', |
|
445 | 446 | conditions={'method': ['DELETE']}) |
|
446 | 447 | |
|
447 | 448 | m.connect('admin_settings_email', '/settings/email', |
|
448 | 449 | action='settings_email_update', |
|
449 | 450 | conditions={'method': ['POST']}) |
|
450 | 451 | m.connect('admin_settings_email', '/settings/email', |
|
451 | 452 | action='settings_email', conditions={'method': ['GET']}) |
|
452 | 453 | |
|
453 | 454 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
454 | 455 | action='settings_hooks_update', |
|
455 | 456 | conditions={'method': ['POST', 'DELETE']}) |
|
456 | 457 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
457 | 458 | action='settings_hooks', conditions={'method': ['GET']}) |
|
458 | 459 | |
|
459 | 460 | m.connect('admin_settings_search', '/settings/search', |
|
460 | 461 | action='settings_search', conditions={'method': ['GET']}) |
|
461 | 462 | |
|
462 | 463 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
463 | 464 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
464 | 465 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
465 | 466 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
466 | 467 | |
|
467 | 468 | m.connect('admin_settings_labs', '/settings/labs', |
|
468 | 469 | action='settings_labs_update', |
|
469 | 470 | conditions={'method': ['POST']}) |
|
470 | 471 | m.connect('admin_settings_labs', '/settings/labs', |
|
471 | 472 | action='settings_labs', conditions={'method': ['GET']}) |
|
472 | 473 | |
|
473 | 474 | # ADMIN MY ACCOUNT |
|
474 | 475 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
475 | 476 | controller='admin/my_account') as m: |
|
476 | 477 | |
|
477 | 478 | m.connect('my_account_edit', '/my_account/edit', |
|
478 | 479 | action='my_account_edit', conditions={'method': ['GET']}) |
|
479 | 480 | m.connect('my_account', '/my_account/update', |
|
480 | 481 | action='my_account_update', conditions={'method': ['POST']}) |
|
481 | 482 | |
|
482 | 483 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
483 | 484 | # handled, remove after migration to pyramid |
|
484 | 485 | m.connect('my_account_password', '/my_account/password', |
|
485 | 486 | action='my_account_password', conditions={'method': ['GET']}) |
|
486 | 487 | |
|
487 | 488 | m.connect('my_account_repos', '/my_account/repos', |
|
488 | 489 | action='my_account_repos', conditions={'method': ['GET']}) |
|
489 | 490 | |
|
490 | 491 | m.connect('my_account_watched', '/my_account/watched', |
|
491 | 492 | action='my_account_watched', conditions={'method': ['GET']}) |
|
492 | 493 | |
|
493 | 494 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
494 | 495 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
495 | 496 | |
|
496 | 497 | m.connect('my_account_perms', '/my_account/perms', |
|
497 | 498 | action='my_account_perms', conditions={'method': ['GET']}) |
|
498 | 499 | |
|
499 | 500 | m.connect('my_account_emails', '/my_account/emails', |
|
500 | 501 | action='my_account_emails', conditions={'method': ['GET']}) |
|
501 | 502 | m.connect('my_account_emails', '/my_account/emails', |
|
502 | 503 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
503 | 504 | m.connect('my_account_emails', '/my_account/emails', |
|
504 | 505 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
505 | 506 | |
|
506 | 507 | m.connect('my_account_notifications', '/my_account/notifications', |
|
507 | 508 | action='my_notifications', |
|
508 | 509 | conditions={'method': ['GET']}) |
|
509 | 510 | m.connect('my_account_notifications_toggle_visibility', |
|
510 | 511 | '/my_account/toggle_visibility', |
|
511 | 512 | action='my_notifications_toggle_visibility', |
|
512 | 513 | conditions={'method': ['POST']}) |
|
513 | 514 | |
|
514 | 515 | # NOTIFICATION REST ROUTES |
|
515 | 516 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
516 | 517 | controller='admin/notifications') as m: |
|
517 | 518 | m.connect('notifications', '/notifications', |
|
518 | 519 | action='index', conditions={'method': ['GET']}) |
|
519 | 520 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
520 | 521 | action='mark_all_read', conditions={'method': ['POST']}) |
|
521 | 522 | m.connect('/notifications/{notification_id}', |
|
522 | 523 | action='update', conditions={'method': ['PUT']}) |
|
523 | 524 | m.connect('/notifications/{notification_id}', |
|
524 | 525 | action='delete', conditions={'method': ['DELETE']}) |
|
525 | 526 | m.connect('notification', '/notifications/{notification_id}', |
|
526 | 527 | action='show', conditions={'method': ['GET']}) |
|
527 | 528 | |
|
528 | 529 | # ADMIN GIST |
|
529 | 530 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
530 | 531 | controller='admin/gists') as m: |
|
531 | 532 | m.connect('gists', '/gists', |
|
532 | 533 | action='create', conditions={'method': ['POST']}) |
|
533 | 534 | m.connect('gists', '/gists', jsroute=True, |
|
534 | 535 | action='index', conditions={'method': ['GET']}) |
|
535 | 536 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
536 | 537 | action='new', conditions={'method': ['GET']}) |
|
537 | 538 | |
|
538 | 539 | m.connect('/gists/{gist_id}', |
|
539 | 540 | action='delete', conditions={'method': ['DELETE']}) |
|
540 | 541 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
541 | 542 | action='edit_form', conditions={'method': ['GET']}) |
|
542 | 543 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
543 | 544 | action='edit', conditions={'method': ['POST']}) |
|
544 | 545 | m.connect( |
|
545 | 546 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
546 | 547 | action='check_revision', conditions={'method': ['GET']}) |
|
547 | 548 | |
|
548 | 549 | m.connect('gist', '/gists/{gist_id}', |
|
549 | 550 | action='show', conditions={'method': ['GET']}) |
|
550 | 551 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
551 | 552 | revision='tip', |
|
552 | 553 | action='show', conditions={'method': ['GET']}) |
|
553 | 554 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
554 | 555 | revision='tip', |
|
555 | 556 | action='show', conditions={'method': ['GET']}) |
|
556 | 557 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
557 | 558 | revision='tip', |
|
558 | 559 | action='show', conditions={'method': ['GET']}, |
|
559 | 560 | requirements=URL_NAME_REQUIREMENTS) |
|
560 | 561 | |
|
561 | 562 | # USER JOURNAL |
|
562 | 563 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), |
|
563 | 564 | controller='journal', action='index') |
|
564 | 565 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), |
|
565 | 566 | controller='journal', action='journal_rss') |
|
566 | 567 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), |
|
567 | 568 | controller='journal', action='journal_atom') |
|
568 | 569 | |
|
569 | 570 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), |
|
570 | 571 | controller='journal', action='public_journal') |
|
571 | 572 | |
|
572 | 573 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), |
|
573 | 574 | controller='journal', action='public_journal_rss') |
|
574 | 575 | |
|
575 | 576 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), |
|
576 | 577 | controller='journal', action='public_journal_rss') |
|
577 | 578 | |
|
578 | 579 | rmap.connect('public_journal_atom', |
|
579 | 580 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', |
|
580 | 581 | action='public_journal_atom') |
|
581 | 582 | |
|
582 | 583 | rmap.connect('public_journal_atom_old', |
|
583 | 584 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', |
|
584 | 585 | action='public_journal_atom') |
|
585 | 586 | |
|
586 | 587 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), |
|
587 | 588 | controller='journal', action='toggle_following', jsroute=True, |
|
588 | 589 | conditions={'method': ['POST']}) |
|
589 | 590 | |
|
590 | 591 | # FEEDS |
|
591 | 592 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', |
|
592 | 593 | controller='feed', action='rss', |
|
593 | 594 | conditions={'function': check_repo}, |
|
594 | 595 | requirements=URL_NAME_REQUIREMENTS) |
|
595 | 596 | |
|
596 | 597 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', |
|
597 | 598 | controller='feed', action='atom', |
|
598 | 599 | conditions={'function': check_repo}, |
|
599 | 600 | requirements=URL_NAME_REQUIREMENTS) |
|
600 | 601 | |
|
601 | 602 | #========================================================================== |
|
602 | 603 | # REPOSITORY ROUTES |
|
603 | 604 | #========================================================================== |
|
604 | 605 | |
|
605 | 606 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
606 | 607 | controller='admin/repos', action='repo_creating', |
|
607 | 608 | requirements=URL_NAME_REQUIREMENTS) |
|
608 | 609 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
609 | 610 | controller='admin/repos', action='repo_check', |
|
610 | 611 | requirements=URL_NAME_REQUIREMENTS) |
|
611 | 612 | |
|
612 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', | |
|
613 | controller='summary', action='repo_stats', | |
|
614 | conditions={'function': check_repo}, | |
|
615 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
616 | ||
|
617 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', | |
|
618 | controller='summary', action='repo_refs_data', | |
|
619 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
620 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', | |
|
621 | controller='summary', action='repo_refs_changelog_data', | |
|
622 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
623 | ||
|
624 | 613 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', |
|
625 | 614 | controller='changeset', revision='tip', |
|
626 | 615 | conditions={'function': check_repo}, |
|
627 | 616 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
628 | 617 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', |
|
629 | 618 | controller='changeset', revision='tip', action='changeset_children', |
|
630 | 619 | conditions={'function': check_repo}, |
|
631 | 620 | requirements=URL_NAME_REQUIREMENTS) |
|
632 | 621 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', |
|
633 | 622 | controller='changeset', revision='tip', action='changeset_parents', |
|
634 | 623 | conditions={'function': check_repo}, |
|
635 | 624 | requirements=URL_NAME_REQUIREMENTS) |
|
636 | 625 | |
|
637 | 626 | # repo edit options |
|
638 | 627 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
639 | 628 | controller='admin/repos', action='edit_fields', |
|
640 | 629 | conditions={'method': ['GET'], 'function': check_repo}, |
|
641 | 630 | requirements=URL_NAME_REQUIREMENTS) |
|
642 | 631 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
643 | 632 | controller='admin/repos', action='create_repo_field', |
|
644 | 633 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
645 | 634 | requirements=URL_NAME_REQUIREMENTS) |
|
646 | 635 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
647 | 636 | controller='admin/repos', action='delete_repo_field', |
|
648 | 637 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
649 | 638 | requirements=URL_NAME_REQUIREMENTS) |
|
650 | 639 | |
|
651 | 640 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
652 | 641 | controller='admin/repos', action='toggle_locking', |
|
653 | 642 | conditions={'method': ['GET'], 'function': check_repo}, |
|
654 | 643 | requirements=URL_NAME_REQUIREMENTS) |
|
655 | 644 | |
|
656 | 645 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
657 | 646 | controller='admin/repos', action='edit_remote_form', |
|
658 | 647 | conditions={'method': ['GET'], 'function': check_repo}, |
|
659 | 648 | requirements=URL_NAME_REQUIREMENTS) |
|
660 | 649 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
661 | 650 | controller='admin/repos', action='edit_remote', |
|
662 | 651 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
663 | 652 | requirements=URL_NAME_REQUIREMENTS) |
|
664 | 653 | |
|
665 | 654 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
666 | 655 | controller='admin/repos', action='edit_statistics_form', |
|
667 | 656 | conditions={'method': ['GET'], 'function': check_repo}, |
|
668 | 657 | requirements=URL_NAME_REQUIREMENTS) |
|
669 | 658 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
670 | 659 | controller='admin/repos', action='edit_statistics', |
|
671 | 660 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
672 | 661 | requirements=URL_NAME_REQUIREMENTS) |
|
673 | 662 | rmap.connect('repo_settings_issuetracker', |
|
674 | 663 | '/{repo_name}/settings/issue-tracker', |
|
675 | 664 | controller='admin/repos', action='repo_issuetracker', |
|
676 | 665 | conditions={'method': ['GET'], 'function': check_repo}, |
|
677 | 666 | requirements=URL_NAME_REQUIREMENTS) |
|
678 | 667 | rmap.connect('repo_issuetracker_test', |
|
679 | 668 | '/{repo_name}/settings/issue-tracker/test', |
|
680 | 669 | controller='admin/repos', action='repo_issuetracker_test', |
|
681 | 670 | conditions={'method': ['POST'], 'function': check_repo}, |
|
682 | 671 | requirements=URL_NAME_REQUIREMENTS) |
|
683 | 672 | rmap.connect('repo_issuetracker_delete', |
|
684 | 673 | '/{repo_name}/settings/issue-tracker/delete', |
|
685 | 674 | controller='admin/repos', action='repo_issuetracker_delete', |
|
686 | 675 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
687 | 676 | requirements=URL_NAME_REQUIREMENTS) |
|
688 | 677 | rmap.connect('repo_issuetracker_save', |
|
689 | 678 | '/{repo_name}/settings/issue-tracker/save', |
|
690 | 679 | controller='admin/repos', action='repo_issuetracker_save', |
|
691 | 680 | conditions={'method': ['POST'], 'function': check_repo}, |
|
692 | 681 | requirements=URL_NAME_REQUIREMENTS) |
|
693 | 682 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
694 | 683 | controller='admin/repos', action='repo_settings_vcs_update', |
|
695 | 684 | conditions={'method': ['POST'], 'function': check_repo}, |
|
696 | 685 | requirements=URL_NAME_REQUIREMENTS) |
|
697 | 686 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
698 | 687 | controller='admin/repos', action='repo_settings_vcs', |
|
699 | 688 | conditions={'method': ['GET'], 'function': check_repo}, |
|
700 | 689 | requirements=URL_NAME_REQUIREMENTS) |
|
701 | 690 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
702 | 691 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
703 | 692 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
704 | 693 | requirements=URL_NAME_REQUIREMENTS) |
|
705 | 694 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', |
|
706 | 695 | controller='admin/repos', action='repo_settings_pullrequest', |
|
707 | 696 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, |
|
708 | 697 | requirements=URL_NAME_REQUIREMENTS) |
|
709 | 698 | |
|
710 | 699 | # still working url for backward compat. |
|
711 | 700 | rmap.connect('raw_changeset_home_depraced', |
|
712 | 701 | '/{repo_name}/raw-changeset/{revision}', |
|
713 | 702 | controller='changeset', action='changeset_raw', |
|
714 | 703 | revision='tip', conditions={'function': check_repo}, |
|
715 | 704 | requirements=URL_NAME_REQUIREMENTS) |
|
716 | 705 | |
|
717 | 706 | # new URLs |
|
718 | 707 | rmap.connect('changeset_raw_home', |
|
719 | 708 | '/{repo_name}/changeset-diff/{revision}', |
|
720 | 709 | controller='changeset', action='changeset_raw', |
|
721 | 710 | revision='tip', conditions={'function': check_repo}, |
|
722 | 711 | requirements=URL_NAME_REQUIREMENTS) |
|
723 | 712 | |
|
724 | 713 | rmap.connect('changeset_patch_home', |
|
725 | 714 | '/{repo_name}/changeset-patch/{revision}', |
|
726 | 715 | controller='changeset', action='changeset_patch', |
|
727 | 716 | revision='tip', conditions={'function': check_repo}, |
|
728 | 717 | requirements=URL_NAME_REQUIREMENTS) |
|
729 | 718 | |
|
730 | 719 | rmap.connect('changeset_download_home', |
|
731 | 720 | '/{repo_name}/changeset-download/{revision}', |
|
732 | 721 | controller='changeset', action='changeset_download', |
|
733 | 722 | revision='tip', conditions={'function': check_repo}, |
|
734 | 723 | requirements=URL_NAME_REQUIREMENTS) |
|
735 | 724 | |
|
736 | 725 | rmap.connect('changeset_comment', |
|
737 | 726 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, |
|
738 | 727 | controller='changeset', revision='tip', action='comment', |
|
739 | 728 | conditions={'function': check_repo}, |
|
740 | 729 | requirements=URL_NAME_REQUIREMENTS) |
|
741 | 730 | |
|
742 | 731 | rmap.connect('changeset_comment_preview', |
|
743 | 732 | '/{repo_name}/changeset/comment/preview', jsroute=True, |
|
744 | 733 | controller='changeset', action='preview_comment', |
|
745 | 734 | conditions={'function': check_repo, 'method': ['POST']}, |
|
746 | 735 | requirements=URL_NAME_REQUIREMENTS) |
|
747 | 736 | |
|
748 | 737 | rmap.connect('changeset_comment_delete', |
|
749 | 738 | '/{repo_name}/changeset/comment/{comment_id}/delete', |
|
750 | 739 | controller='changeset', action='delete_comment', |
|
751 | 740 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
752 | 741 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
753 | 742 | |
|
754 | 743 | rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}', |
|
755 | 744 | controller='changeset', action='changeset_info', |
|
756 | 745 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
757 | 746 | |
|
758 | 747 | rmap.connect('compare_home', |
|
759 | 748 | '/{repo_name}/compare', |
|
760 | 749 | controller='compare', action='index', |
|
761 | 750 | conditions={'function': check_repo}, |
|
762 | 751 | requirements=URL_NAME_REQUIREMENTS) |
|
763 | 752 | |
|
764 | 753 | rmap.connect('compare_url', |
|
765 | 754 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
766 | 755 | controller='compare', action='compare', |
|
767 | 756 | conditions={'function': check_repo}, |
|
768 | 757 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
769 | 758 | |
|
770 | 759 | rmap.connect('pullrequest_home', |
|
771 | 760 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
772 | 761 | action='index', conditions={'function': check_repo, |
|
773 | 762 | 'method': ['GET']}, |
|
774 | 763 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
775 | 764 | |
|
776 | 765 | rmap.connect('pullrequest', |
|
777 | 766 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
778 | 767 | action='create', conditions={'function': check_repo, |
|
779 | 768 | 'method': ['POST']}, |
|
780 | 769 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
781 | 770 | |
|
782 | 771 | rmap.connect('pullrequest_repo_refs', |
|
783 | 772 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
784 | 773 | controller='pullrequests', |
|
785 | 774 | action='get_repo_refs', |
|
786 | 775 | conditions={'function': check_repo, 'method': ['GET']}, |
|
787 | 776 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
788 | 777 | |
|
789 | 778 | rmap.connect('pullrequest_repo_destinations', |
|
790 | 779 | '/{repo_name}/pull-request/repo-destinations', |
|
791 | 780 | controller='pullrequests', |
|
792 | 781 | action='get_repo_destinations', |
|
793 | 782 | conditions={'function': check_repo, 'method': ['GET']}, |
|
794 | 783 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
795 | 784 | |
|
796 | 785 | rmap.connect('pullrequest_show', |
|
797 | 786 | '/{repo_name}/pull-request/{pull_request_id}', |
|
798 | 787 | controller='pullrequests', |
|
799 | 788 | action='show', conditions={'function': check_repo, |
|
800 | 789 | 'method': ['GET']}, |
|
801 | 790 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
802 | 791 | |
|
803 | 792 | rmap.connect('pullrequest_update', |
|
804 | 793 | '/{repo_name}/pull-request/{pull_request_id}', |
|
805 | 794 | controller='pullrequests', |
|
806 | 795 | action='update', conditions={'function': check_repo, |
|
807 | 796 | 'method': ['PUT']}, |
|
808 | 797 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
809 | 798 | |
|
810 | 799 | rmap.connect('pullrequest_merge', |
|
811 | 800 | '/{repo_name}/pull-request/{pull_request_id}', |
|
812 | 801 | controller='pullrequests', |
|
813 | 802 | action='merge', conditions={'function': check_repo, |
|
814 | 803 | 'method': ['POST']}, |
|
815 | 804 | requirements=URL_NAME_REQUIREMENTS) |
|
816 | 805 | |
|
817 | 806 | rmap.connect('pullrequest_delete', |
|
818 | 807 | '/{repo_name}/pull-request/{pull_request_id}', |
|
819 | 808 | controller='pullrequests', |
|
820 | 809 | action='delete', conditions={'function': check_repo, |
|
821 | 810 | 'method': ['DELETE']}, |
|
822 | 811 | requirements=URL_NAME_REQUIREMENTS) |
|
823 | 812 | |
|
824 | 813 | rmap.connect('pullrequest_comment', |
|
825 | 814 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
826 | 815 | controller='pullrequests', |
|
827 | 816 | action='comment', conditions={'function': check_repo, |
|
828 | 817 | 'method': ['POST']}, |
|
829 | 818 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
830 | 819 | |
|
831 | 820 | rmap.connect('pullrequest_comment_delete', |
|
832 | 821 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
833 | 822 | controller='pullrequests', action='delete_comment', |
|
834 | 823 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
835 | 824 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
836 | 825 | |
|
837 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', | |
|
838 | controller='summary', conditions={'function': check_repo}, | |
|
839 | requirements=URL_NAME_REQUIREMENTS) | |
|
840 | ||
|
841 | 826 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, |
|
842 | 827 | controller='changelog', conditions={'function': check_repo}, |
|
843 | 828 | requirements=URL_NAME_REQUIREMENTS) |
|
844 | 829 | |
|
845 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', | |
|
846 | controller='changelog', action='changelog_summary', | |
|
847 | conditions={'function': check_repo}, | |
|
848 | requirements=URL_NAME_REQUIREMENTS) | |
|
849 | ||
|
850 | 830 | rmap.connect('changelog_file_home', |
|
851 | 831 | '/{repo_name}/changelog/{revision}/{f_path}', |
|
852 | 832 | controller='changelog', f_path=None, |
|
853 | 833 | conditions={'function': check_repo}, |
|
854 | 834 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
855 | 835 | |
|
856 | 836 | rmap.connect('changelog_elements', '/{repo_name}/changelog_details', |
|
857 | 837 | controller='changelog', action='changelog_elements', |
|
858 | 838 | conditions={'function': check_repo}, |
|
859 | 839 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
860 | 840 | |
|
861 | 841 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', |
|
862 | 842 | controller='files', revision='tip', f_path='', |
|
863 | 843 | conditions={'function': check_repo}, |
|
864 | 844 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
865 | 845 | |
|
866 | 846 | rmap.connect('files_home_simple_catchrev', |
|
867 | 847 | '/{repo_name}/files/{revision}', |
|
868 | 848 | controller='files', revision='tip', f_path='', |
|
869 | 849 | conditions={'function': check_repo}, |
|
870 | 850 | requirements=URL_NAME_REQUIREMENTS) |
|
871 | 851 | |
|
872 | 852 | rmap.connect('files_home_simple_catchall', |
|
873 | 853 | '/{repo_name}/files', |
|
874 | 854 | controller='files', revision='tip', f_path='', |
|
875 | 855 | conditions={'function': check_repo}, |
|
876 | 856 | requirements=URL_NAME_REQUIREMENTS) |
|
877 | 857 | |
|
878 | 858 | rmap.connect('files_history_home', |
|
879 | 859 | '/{repo_name}/history/{revision}/{f_path}', |
|
880 | 860 | controller='files', action='history', revision='tip', f_path='', |
|
881 | 861 | conditions={'function': check_repo}, |
|
882 | 862 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
883 | 863 | |
|
884 | 864 | rmap.connect('files_authors_home', |
|
885 | 865 | '/{repo_name}/authors/{revision}/{f_path}', |
|
886 | 866 | controller='files', action='authors', revision='tip', f_path='', |
|
887 | 867 | conditions={'function': check_repo}, |
|
888 | 868 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
889 | 869 | |
|
890 | 870 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', |
|
891 | 871 | controller='files', action='diff', f_path='', |
|
892 | 872 | conditions={'function': check_repo}, |
|
893 | 873 | requirements=URL_NAME_REQUIREMENTS) |
|
894 | 874 | |
|
895 | 875 | rmap.connect('files_diff_2way_home', |
|
896 | 876 | '/{repo_name}/diff-2way/{f_path}', |
|
897 | 877 | controller='files', action='diff_2way', f_path='', |
|
898 | 878 | conditions={'function': check_repo}, |
|
899 | 879 | requirements=URL_NAME_REQUIREMENTS) |
|
900 | 880 | |
|
901 | 881 | rmap.connect('files_rawfile_home', |
|
902 | 882 | '/{repo_name}/rawfile/{revision}/{f_path}', |
|
903 | 883 | controller='files', action='rawfile', revision='tip', |
|
904 | 884 | f_path='', conditions={'function': check_repo}, |
|
905 | 885 | requirements=URL_NAME_REQUIREMENTS) |
|
906 | 886 | |
|
907 | 887 | rmap.connect('files_raw_home', |
|
908 | 888 | '/{repo_name}/raw/{revision}/{f_path}', |
|
909 | 889 | controller='files', action='raw', revision='tip', f_path='', |
|
910 | 890 | conditions={'function': check_repo}, |
|
911 | 891 | requirements=URL_NAME_REQUIREMENTS) |
|
912 | 892 | |
|
913 | 893 | rmap.connect('files_render_home', |
|
914 | 894 | '/{repo_name}/render/{revision}/{f_path}', |
|
915 | 895 | controller='files', action='index', revision='tip', f_path='', |
|
916 | 896 | rendered=True, conditions={'function': check_repo}, |
|
917 | 897 | requirements=URL_NAME_REQUIREMENTS) |
|
918 | 898 | |
|
919 | 899 | rmap.connect('files_annotate_home', |
|
920 | 900 | '/{repo_name}/annotate/{revision}/{f_path}', |
|
921 | 901 | controller='files', action='index', revision='tip', |
|
922 | 902 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
923 | 903 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
924 | 904 | |
|
925 | 905 | rmap.connect('files_annotate_previous', |
|
926 | 906 | '/{repo_name}/annotate-previous/{revision}/{f_path}', |
|
927 | 907 | controller='files', action='annotate_previous', revision='tip', |
|
928 | 908 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
929 | 909 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
930 | 910 | |
|
931 | 911 | rmap.connect('files_edit', |
|
932 | 912 | '/{repo_name}/edit/{revision}/{f_path}', |
|
933 | 913 | controller='files', action='edit', revision='tip', |
|
934 | 914 | f_path='', |
|
935 | 915 | conditions={'function': check_repo, 'method': ['POST']}, |
|
936 | 916 | requirements=URL_NAME_REQUIREMENTS) |
|
937 | 917 | |
|
938 | 918 | rmap.connect('files_edit_home', |
|
939 | 919 | '/{repo_name}/edit/{revision}/{f_path}', |
|
940 | 920 | controller='files', action='edit_home', revision='tip', |
|
941 | 921 | f_path='', conditions={'function': check_repo}, |
|
942 | 922 | requirements=URL_NAME_REQUIREMENTS) |
|
943 | 923 | |
|
944 | 924 | rmap.connect('files_add', |
|
945 | 925 | '/{repo_name}/add/{revision}/{f_path}', |
|
946 | 926 | controller='files', action='add', revision='tip', |
|
947 | 927 | f_path='', |
|
948 | 928 | conditions={'function': check_repo, 'method': ['POST']}, |
|
949 | 929 | requirements=URL_NAME_REQUIREMENTS) |
|
950 | 930 | |
|
951 | 931 | rmap.connect('files_add_home', |
|
952 | 932 | '/{repo_name}/add/{revision}/{f_path}', |
|
953 | 933 | controller='files', action='add_home', revision='tip', |
|
954 | 934 | f_path='', conditions={'function': check_repo}, |
|
955 | 935 | requirements=URL_NAME_REQUIREMENTS) |
|
956 | 936 | |
|
957 | 937 | rmap.connect('files_delete', |
|
958 | 938 | '/{repo_name}/delete/{revision}/{f_path}', |
|
959 | 939 | controller='files', action='delete', revision='tip', |
|
960 | 940 | f_path='', |
|
961 | 941 | conditions={'function': check_repo, 'method': ['POST']}, |
|
962 | 942 | requirements=URL_NAME_REQUIREMENTS) |
|
963 | 943 | |
|
964 | 944 | rmap.connect('files_delete_home', |
|
965 | 945 | '/{repo_name}/delete/{revision}/{f_path}', |
|
966 | 946 | controller='files', action='delete_home', revision='tip', |
|
967 | 947 | f_path='', conditions={'function': check_repo}, |
|
968 | 948 | requirements=URL_NAME_REQUIREMENTS) |
|
969 | 949 | |
|
970 | 950 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', |
|
971 | 951 | controller='files', action='archivefile', |
|
972 | 952 | conditions={'function': check_repo}, |
|
973 | 953 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
974 | 954 | |
|
975 | 955 | rmap.connect('files_nodelist_home', |
|
976 | 956 | '/{repo_name}/nodelist/{revision}/{f_path}', |
|
977 | 957 | controller='files', action='nodelist', |
|
978 | 958 | conditions={'function': check_repo}, |
|
979 | 959 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
980 | 960 | |
|
981 | 961 | rmap.connect('files_nodetree_full', |
|
982 | 962 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', |
|
983 | 963 | controller='files', action='nodetree_full', |
|
984 | 964 | conditions={'function': check_repo}, |
|
985 | 965 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
986 | 966 | |
|
987 | 967 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
988 | 968 | controller='forks', action='fork_create', |
|
989 | 969 | conditions={'function': check_repo, 'method': ['POST']}, |
|
990 | 970 | requirements=URL_NAME_REQUIREMENTS) |
|
991 | 971 | |
|
992 | 972 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
993 | 973 | controller='forks', action='fork', |
|
994 | 974 | conditions={'function': check_repo}, |
|
995 | 975 | requirements=URL_NAME_REQUIREMENTS) |
|
996 | 976 | |
|
997 | 977 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
998 | 978 | controller='forks', action='forks', |
|
999 | 979 | conditions={'function': check_repo}, |
|
1000 | 980 | requirements=URL_NAME_REQUIREMENTS) |
|
1001 | 981 | |
|
1002 | # catch all, at the end | |
|
1003 | _connect_with_slash( | |
|
1004 | rmap, 'summary_home', '/{repo_name}', jsroute=True, | |
|
1005 | controller='summary', action='index', | |
|
1006 | conditions={'function': check_repo}, | |
|
1007 | requirements=URL_NAME_REQUIREMENTS) | |
|
1008 | ||
|
1009 | 982 | return rmap |
|
1010 | ||
|
1011 | ||
|
1012 | def _connect_with_slash(mapper, name, path, *args, **kwargs): | |
|
1013 | """ | |
|
1014 | Connect a route with an optional trailing slash in `path`. | |
|
1015 | """ | |
|
1016 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) | |
|
1017 | mapper.connect(name, path, *args, **kwargs) |
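
Two idioms in the routing file above carry most of its behaviour: Routes 'function' conditions such as check_repo/check_group, which can veto a match for proper 404 handling and may rewrite match_dict in place (check_repo swaps a numeric id for the real repo_name), and the _connect_with_slash helper, which registers the catch-all summary route both with and without a trailing slash. A minimal, self-contained sketch of both idioms against the standalone routes library follows; the route names, paths and the check_int-style validator here are illustrative only, not RhodeCode's actual configuration.

from routes import Mapper

def check_int(environ, match_dict):
    # Routes calls a 'function' condition with (environ, match_dict) and
    # skips the route when it returns a false value; the dict may also be
    # rewritten in place, as check_repo above does with 'repo_name'.
    # Illustrative validator, not part of the diffed file.
    return match_dict.get('id', '').isdigit()

def connect_with_slash(mapper, name, path, **kwargs):
    # Same idea as _connect_with_slash above: register the route twice so
    # the URL resolves with or without a trailing slash.
    mapper.connect(name + '_slash', path + '/', **kwargs)
    mapper.connect(name, path, **kwargs)

rmap = Mapper()
rmap.connect('user', '/users/{id}', controller='users', action='show',
             conditions={'function': check_int})
connect_with_slash(rmap, 'summary', '/{repo_name}',
                    controller='summary', action='index')

print(rmap.match('/users/42'))   # match dict with controller='users', id='42'
print(rmap.match('/users/abc'))  # no match: vetoed by the function condition
print(rmap.match('/demo-repo'))  # routed to the summary controller

Returning False from the condition makes Routes skip the route entirely, which is what lets the stack fall through to its 404 handling instead of dispatching a controller with an invalid repo_name.
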
@@ -1,610 +1,610 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Repositories controller for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | |
|
29 | 29 | import formencode |
|
30 | 30 | from formencode import htmlfill |
|
31 | 31 | from pylons import request, tmpl_context as c, url |
|
32 | 32 | from pylons.controllers.util import redirect |
|
33 | 33 | from pylons.i18n.translation import _ |
|
34 | 34 | from webob.exc import HTTPForbidden, HTTPNotFound, HTTPBadRequest |
|
35 | 35 | |
|
36 | 36 | import rhodecode |
|
37 | 37 | from rhodecode.lib import auth, helpers as h |
|
38 | 38 | from rhodecode.lib.auth import ( |
|
39 | 39 | LoginRequired, HasPermissionAllDecorator, |
|
40 | 40 | HasRepoPermissionAllDecorator, NotAnonymous, HasPermissionAny, |
|
41 | 41 | HasRepoGroupPermissionAny, HasRepoPermissionAnyDecorator) |
|
42 | 42 | from rhodecode.lib.base import BaseRepoController, render |
|
43 | 43 | from rhodecode.lib.ext_json import json |
|
44 | 44 | from rhodecode.lib.exceptions import AttachedForksError |
|
45 | 45 | from rhodecode.lib.utils import action_logger, repo_name_slug, jsonify |
|
46 | 46 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
47 | 47 | from rhodecode.lib.vcs import RepositoryError |
|
48 | 48 | from rhodecode.model.db import ( |
|
49 | 49 | User, Repository, UserFollowing, RepoGroup, RepositoryField) |
|
50 | 50 | from rhodecode.model.forms import ( |
|
51 | 51 | RepoForm, RepoFieldForm, RepoPermsForm, RepoVcsSettingsForm, |
|
52 | 52 | IssueTrackerPatternsForm) |
|
53 | 53 | from rhodecode.model.meta import Session |
|
54 | 54 | from rhodecode.model.repo import RepoModel |
|
55 | 55 | from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList |
|
56 | 56 | from rhodecode.model.settings import ( |
|
57 | 57 | SettingsModel, IssueTrackerSettingsModel, VcsSettingsModel, |
|
58 | 58 | SettingNotFound) |
|
59 | 59 | |
|
60 | 60 | log = logging.getLogger(__name__) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class ReposController(BaseRepoController): |
|
64 | 64 | """ |
|
65 | 65 | REST Controller styled on the Atom Publishing Protocol""" |
|
66 | 66 | # To properly map this controller, ensure your config/routing.py |
|
67 | 67 | # file has a resource setup: |
|
68 | 68 | # map.resource('repo', 'repos') |
|
69 | 69 | |
|
70 | 70 | @LoginRequired() |
|
71 | 71 | def __before__(self): |
|
72 | 72 | super(ReposController, self).__before__() |
|
73 | 73 | |
|
74 | 74 | def _load_repo(self, repo_name): |
|
75 | 75 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
76 | 76 | |
|
77 | 77 | if repo_obj is None: |
|
78 | 78 | h.not_mapped_error(repo_name) |
|
79 | 79 | return redirect(url('repos')) |
|
80 | 80 | |
|
81 | 81 | return repo_obj |
|
82 | 82 | |
|
83 | 83 | def __load_defaults(self, repo=None): |
|
84 | 84 | acl_groups = RepoGroupList(RepoGroup.query().all(), |
|
85 | 85 | perm_set=['group.write', 'group.admin']) |
|
86 | 86 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
87 | 87 | c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups) |
|
88 | 88 | |
|
89 | 89 | # in case someone no longer has group.write access to a repository |
|
90 | 90 | # pre fill the list with this entry, we don't care if this is the same |
|
91 | 91 | # but it will allow saving repo data properly. |
|
92 | 92 | |
|
93 | 93 | repo_group = None |
|
94 | 94 | if repo: |
|
95 | 95 | repo_group = repo.group |
|
96 | 96 | if repo_group and unicode(repo_group.group_id) not in c.repo_groups_choices: |
|
97 | 97 | c.repo_groups_choices.append(unicode(repo_group.group_id)) |
|
98 | 98 | c.repo_groups.append(RepoGroup._generate_choice(repo_group)) |
|
99 | 99 | |
|
100 | 100 | choices, c.landing_revs = ScmModel().get_repo_landing_revs() |
|
101 | 101 | c.landing_revs_choices = choices |
|
102 | 102 | |
|
103 | 103 | def __load_data(self, repo_name=None): |
|
104 | 104 | """ |
|
105 | 105 | Load defaults settings for edit, and update |
|
106 | 106 | |
|
107 | 107 | :param repo_name: |
|
108 | 108 | """ |
|
109 | 109 | c.repo_info = self._load_repo(repo_name) |
|
110 | 110 | self.__load_defaults(c.repo_info) |
|
111 | 111 | |
|
112 | 112 | # override defaults for exact repo info here git/hg etc |
|
113 | 113 | if not c.repository_requirements_missing: |
|
114 | 114 | choices, c.landing_revs = ScmModel().get_repo_landing_revs( |
|
115 | 115 | c.repo_info) |
|
116 | 116 | c.landing_revs_choices = choices |
|
117 | 117 | defaults = RepoModel()._get_defaults(repo_name) |
|
118 | 118 | |
|
119 | 119 | return defaults |
|
120 | 120 | |
|
121 | 121 | def _log_creation_exception(self, e, repo_name): |
|
122 | 122 | reason = None |
|
123 | 123 | if len(e.args) == 2: |
|
124 | 124 | reason = e.args[1] |
|
125 | 125 | |
|
126 | 126 | if reason == 'INVALID_CERTIFICATE': |
|
127 | 127 | log.exception( |
|
128 | 128 | 'Exception creating a repository: invalid certificate') |
|
129 | 129 | msg = (_('Error creating repository %s: invalid certificate') |
|
130 | 130 | % repo_name) |
|
131 | 131 | else: |
|
132 | 132 | log.exception("Exception creating a repository") |
|
133 | 133 | msg = (_('Error creating repository %s') |
|
134 | 134 | % repo_name) |
|
135 | 135 | |
|
136 | 136 | return msg |
|
137 | 137 | |
|
138 | 138 | @NotAnonymous() |
|
139 | 139 | def index(self, format='html'): |
|
140 | 140 | """GET /repos: All items in the collection""" |
|
141 | 141 | # url('repos') |
|
142 | 142 | |
|
143 | 143 | repo_list = Repository.get_all_repos() |
|
144 | 144 | c.repo_list = RepoList(repo_list, perm_set=['repository.admin']) |
|
145 | 145 | repos_data = RepoModel().get_repos_as_dict( |
|
146 | 146 | repo_list=c.repo_list, admin=True, super_user_actions=True) |
|
147 | 147 | # json used to render the grid |
|
148 | 148 | c.data = json.dumps(repos_data) |
|
149 | 149 | |
|
150 | 150 | return render('admin/repos/repos.mako') |
|
151 | 151 | |
|
152 | 152 | # perms check inside |
|
153 | 153 | @NotAnonymous() |
|
154 | 154 | @auth.CSRFRequired() |
|
155 | 155 | def create(self): |
|
156 | 156 | """ |
|
157 | 157 | POST /repos: Create a new item""" |
|
158 | 158 | # url('repos') |
|
159 | 159 | |
|
160 | 160 | self.__load_defaults() |
|
161 | 161 | form_result = {} |
|
162 | 162 | task_id = None |
|
163 | 163 | c.personal_repo_group = c.rhodecode_user.personal_repo_group |
|
164 | 164 | try: |
|
165 | 165 | # CanWriteToGroup validators checks permissions of this POST |
|
166 | 166 | form_result = RepoForm(repo_groups=c.repo_groups_choices, |
|
167 | 167 | landing_revs=c.landing_revs_choices)()\ |
|
168 | 168 | .to_python(dict(request.POST)) |
|
169 | 169 | |
|
170 | 170 | # create is done sometimes async on celery, db transaction |
|
171 | 171 | # management is handled there. |
|
172 | 172 | task = RepoModel().create(form_result, c.rhodecode_user.user_id) |
|
173 | 173 | from celery.result import BaseAsyncResult |
|
174 | 174 | if isinstance(task, BaseAsyncResult): |
|
175 | 175 | task_id = task.task_id |
|
176 | 176 | except formencode.Invalid as errors: |
|
177 | 177 | return htmlfill.render( |
|
178 | 178 | render('admin/repos/repo_add.mako'), |
|
179 | 179 | defaults=errors.value, |
|
180 | 180 | errors=errors.error_dict or {}, |
|
181 | 181 | prefix_error=False, |
|
182 | 182 | encoding="UTF-8", |
|
183 | 183 | force_defaults=False) |
|
184 | 184 | |
|
185 | 185 | except Exception as e: |
|
186 | 186 | msg = self._log_creation_exception(e, form_result.get('repo_name')) |
|
187 | 187 | h.flash(msg, category='error') |
|
188 | 188 | return redirect(h.route_path('home')) |
|
189 | 189 | |
|
190 | 190 | return redirect(h.url('repo_creating_home', |
|
191 | 191 | repo_name=form_result['repo_name_full'], |
|
192 | 192 | task_id=task_id)) |
|
193 | 193 | |
|
194 | 194 | # perms check inside |
|
195 | 195 | @NotAnonymous() |
|
196 | 196 | def create_repository(self): |
|
197 | 197 | """GET /_admin/create_repository: Form to create a new item""" |
|
198 | 198 | new_repo = request.GET.get('repo', '') |
|
199 | 199 | parent_group = safe_int(request.GET.get('parent_group')) |
|
200 | 200 | _gr = RepoGroup.get(parent_group) |
|
201 | 201 | |
|
202 | 202 | if not HasPermissionAny('hg.admin', 'hg.create.repository')(): |
|
203 | 203 | # you're not super admin nor have global create permissions, |
|
204 | 204 | # but maybe you have at least write permission to a parent group ? |
|
205 | 205 | |
|
206 | 206 | gr_name = _gr.group_name if _gr else None |
|
207 | 207 | # create repositories with write permission on group is set to true |
|
208 | 208 | create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')() |
|
209 | 209 | group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name) |
|
210 | 210 | group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name) |
|
211 | 211 | if not (group_admin or (group_write and create_on_write)): |
|
212 | 212 | raise HTTPForbidden |
|
213 | 213 | |
|
214 | 214 | acl_groups = RepoGroupList(RepoGroup.query().all(), |
|
215 | 215 | perm_set=['group.write', 'group.admin']) |
|
216 | 216 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
217 | 217 | c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups) |
|
218 | 218 | choices, c.landing_revs = ScmModel().get_repo_landing_revs() |
|
219 | 219 | c.personal_repo_group = c.rhodecode_user.personal_repo_group |
|
220 | 220 | c.new_repo = repo_name_slug(new_repo) |
|
221 | 221 | |
|
222 | 222 | # apply the defaults from defaults page |
|
223 | 223 | defaults = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
224 | 224 | # set checkbox to autochecked |
|
225 | 225 | defaults['repo_copy_permissions'] = True |
|
226 | 226 | |
|
227 | 227 | parent_group_choice = '-1' |
|
228 | 228 | if not c.rhodecode_user.is_admin and c.rhodecode_user.personal_repo_group: |
|
229 | 229 | parent_group_choice = c.rhodecode_user.personal_repo_group |
|
230 | 230 | |
|
231 | 231 | if parent_group and _gr: |
|
232 | 232 | if parent_group in [x[0] for x in c.repo_groups]: |
|
233 | 233 | parent_group_choice = unicode(parent_group) |
|
234 | 234 | |
|
235 | 235 | defaults.update({'repo_group': parent_group_choice}) |
|
236 | 236 | |
|
237 | 237 | return htmlfill.render( |
|
238 | 238 | render('admin/repos/repo_add.mako'), |
|
239 | 239 | defaults=defaults, |
|
240 | 240 | errors={}, |
|
241 | 241 | prefix_error=False, |
|
242 | 242 | encoding="UTF-8", |
|
243 | 243 | force_defaults=False |
|
244 | 244 | ) |
|
245 | 245 | |
|
246 | 246 | @NotAnonymous() |
|
247 | 247 | def repo_creating(self, repo_name): |
|
248 | 248 | c.repo = repo_name |
|
249 | 249 | c.task_id = request.GET.get('task_id') |
|
250 | 250 | if not c.repo: |
|
251 | 251 | raise HTTPNotFound() |
|
252 | 252 | return render('admin/repos/repo_creating.mako') |
|
253 | 253 | |
|
254 | 254 | @NotAnonymous() |
|
255 | 255 | @jsonify |
|
256 | 256 | def repo_check(self, repo_name): |
|
257 | 257 | c.repo = repo_name |
|
258 | 258 | task_id = request.GET.get('task_id') |
|
259 | 259 | |
|
260 | 260 | if task_id and task_id not in ['None']: |
|
261 | 261 | import rhodecode |
|
262 | 262 | from celery.result import AsyncResult |
|
263 | 263 | if rhodecode.CELERY_ENABLED: |
|
264 | 264 | task = AsyncResult(task_id) |
|
265 | 265 | if task.failed(): |
|
266 | 266 | msg = self._log_creation_exception(task.result, c.repo) |
|
267 | 267 | h.flash(msg, category='error') |
|
268 | 268 | return redirect(h.route_path('home'), code=501) |
|
269 | 269 | |
|
270 | 270 | repo = Repository.get_by_repo_name(repo_name) |
|
271 | 271 | if repo and repo.repo_state == Repository.STATE_CREATED: |
|
272 | 272 | if repo.clone_uri: |
|
273 | 273 | clone_uri = repo.clone_uri_hidden |
|
274 | 274 | h.flash(_('Created repository %s from %s') |
|
275 | 275 | % (repo.repo_name, clone_uri), category='success') |
|
276 | 276 | else: |
|
277 | repo_url = h.link_to( |
|
|
278 | h.url('summary_home', | |
|
279 |
|
|
|
277 | repo_url = h.link_to( | |
|
278 | repo.repo_name, | |
|
279 | h.route_path('repo_summary',repo_name=repo.repo_name)) | |
|
280 | 280 | fork = repo.fork |
|
281 | 281 | if fork: |
|
282 | 282 | fork_name = fork.repo_name |
|
283 | 283 | h.flash(h.literal(_('Forked repository %s as %s') |
|
284 | 284 | % (fork_name, repo_url)), category='success') |
|
285 | 285 | else: |
|
286 | 286 | h.flash(h.literal(_('Created repository %s') % repo_url), |
|
287 | 287 | category='success') |
|
288 | 288 | return {'result': True} |
|
289 | 289 | return {'result': False} |
|
290 | 290 | |
|
291 | 291 | @HasPermissionAllDecorator('hg.admin') |
|
292 | 292 | def show(self, repo_name, format='html'): |
|
293 | 293 | """GET /repos/repo_name: Show a specific item""" |
|
294 | 294 | # url('repo', repo_name=ID) |
|
295 | 295 | |
|
296 | 296 | @HasRepoPermissionAllDecorator('repository.admin') |
|
297 | 297 | def edit_fields(self, repo_name): |
|
298 | 298 | """GET /repo_name/settings: Form to edit an existing item""" |
|
299 | 299 | c.repo_info = self._load_repo(repo_name) |
|
300 | 300 | c.repo_fields = RepositoryField.query()\ |
|
301 | 301 | .filter(RepositoryField.repository == c.repo_info).all() |
|
302 | 302 | c.active = 'fields' |
|
303 | 303 | if request.POST: |
|
304 | 304 | |
|
305 | 305 | return redirect(url('repo_edit_fields')) |
|
306 | 306 | return render('admin/repos/repo_edit.mako') |
|
307 | 307 | |
|
308 | 308 | @HasRepoPermissionAllDecorator('repository.admin') |
|
309 | 309 | @auth.CSRFRequired() |
|
310 | 310 | def create_repo_field(self, repo_name): |
|
311 | 311 | try: |
|
312 | 312 | form_result = RepoFieldForm()().to_python(dict(request.POST)) |
|
313 | 313 | RepoModel().add_repo_field( |
|
314 | 314 | repo_name, form_result['new_field_key'], |
|
315 | 315 | field_type=form_result['new_field_type'], |
|
316 | 316 | field_value=form_result['new_field_value'], |
|
317 | 317 | field_label=form_result['new_field_label'], |
|
318 | 318 | field_desc=form_result['new_field_desc']) |
|
319 | 319 | |
|
320 | 320 | Session().commit() |
|
321 | 321 | except Exception as e: |
|
322 | 322 | log.exception("Exception creating field") |
|
323 | 323 | msg = _('An error occurred during creation of field') |
|
324 | 324 | if isinstance(e, formencode.Invalid): |
|
325 | 325 | msg += ". " + e.msg |
|
326 | 326 | h.flash(msg, category='error') |
|
327 | 327 | return redirect(url('edit_repo_fields', repo_name=repo_name)) |
|
328 | 328 | |
|
329 | 329 | @HasRepoPermissionAllDecorator('repository.admin') |
|
330 | 330 | @auth.CSRFRequired() |
|
331 | 331 | def delete_repo_field(self, repo_name, field_id): |
|
332 | 332 | field = RepositoryField.get_or_404(field_id) |
|
333 | 333 | try: |
|
334 | 334 | RepoModel().delete_repo_field(repo_name, field.field_key) |
|
335 | 335 | Session().commit() |
|
336 | 336 | except Exception as e: |
|
337 | 337 | log.exception("Exception during removal of field") |
|
338 | 338 | msg = _('An error occurred during removal of field') |
|
339 | 339 | h.flash(msg, category='error') |
|
340 | 340 | return redirect(url('edit_repo_fields', repo_name=repo_name)) |
|
341 | 341 | |
|
342 | 342 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
343 | 343 | @auth.CSRFRequired() |
|
344 | 344 | def toggle_locking(self, repo_name): |
|
345 | 345 | """ |
|
346 | 346 | Toggle locking of a repository via a simple GET call to this url |
|
347 | 347 | |
|
348 | 348 | :param repo_name: |
|
349 | 349 | """ |
|
350 | 350 | |
|
351 | 351 | try: |
|
352 | 352 | repo = Repository.get_by_repo_name(repo_name) |
|
353 | 353 | |
|
354 | 354 | if repo.enable_locking: |
|
355 | 355 | if repo.locked[0]: |
|
356 | 356 | Repository.unlock(repo) |
|
357 | 357 | action = _('Unlocked') |
|
358 | 358 | else: |
|
359 | 359 | Repository.lock(repo, c.rhodecode_user.user_id, |
|
360 | 360 | lock_reason=Repository.LOCK_WEB) |
|
361 | 361 | action = _('Locked') |
|
362 | 362 | |
|
363 | 363 | h.flash(_('Repository has been %s') % action, |
|
364 | 364 | category='success') |
|
365 | 365 | except Exception: |
|
366 | 366 | log.exception("Exception during unlocking") |
|
367 | 367 | h.flash(_('An error occurred during unlocking'), |
|
368 | 368 | category='error') |
|
369 | return redirect( |
|
|
369 | return redirect(h.route_path('repo_summary', repo_name=repo_name)) | |
|
370 | 370 | |
|
371 | 371 | @HasRepoPermissionAllDecorator('repository.admin') |
|
372 | 372 | @auth.CSRFRequired() |
|
373 | 373 | def edit_remote(self, repo_name): |
|
374 | 374 | """PUT /{repo_name}/settings/remote: edit the repo remote.""" |
|
375 | 375 | try: |
|
376 | 376 | ScmModel().pull_changes(repo_name, c.rhodecode_user.username) |
|
377 | 377 | h.flash(_('Pulled from remote location'), category='success') |
|
378 | 378 | except Exception: |
|
379 | 379 | log.exception("Exception during pull from remote") |
|
380 | 380 | h.flash(_('An error occurred during pull from remote location'), |
|
381 | 381 | category='error') |
|
382 | 382 | return redirect(url('edit_repo_remote', repo_name=c.repo_name)) |
|
383 | 383 | |
|
384 | 384 | @HasRepoPermissionAllDecorator('repository.admin') |
|
385 | 385 | def edit_remote_form(self, repo_name): |
|
386 | 386 | """GET /repo_name/settings: Form to edit an existing item""" |
|
387 | 387 | c.repo_info = self._load_repo(repo_name) |
|
388 | 388 | c.active = 'remote' |
|
389 | 389 | |
|
390 | 390 | return render('admin/repos/repo_edit.mako') |
|
391 | 391 | |
|
392 | 392 | @HasRepoPermissionAllDecorator('repository.admin') |
|
393 | 393 | @auth.CSRFRequired() |
|
394 | 394 | def edit_statistics(self, repo_name): |
|
395 | 395 | """PUT /{repo_name}/settings/statistics: reset the repo statistics.""" |
|
396 | 396 | try: |
|
397 | 397 | RepoModel().delete_stats(repo_name) |
|
398 | 398 | Session().commit() |
|
399 | 399 | except Exception as e: |
|
400 | 400 | log.error(traceback.format_exc()) |
|
401 | 401 | h.flash(_('An error occurred during deletion of repository stats'), |
|
402 | 402 | category='error') |
|
403 | 403 | return redirect(url('edit_repo_statistics', repo_name=c.repo_name)) |
|
404 | 404 | |
|
405 | 405 | @HasRepoPermissionAllDecorator('repository.admin') |
|
406 | 406 | def edit_statistics_form(self, repo_name): |
|
407 | 407 | """GET /repo_name/settings: Form to edit an existing item""" |
|
408 | 408 | c.repo_info = self._load_repo(repo_name) |
|
409 | 409 | repo = c.repo_info.scm_instance() |
|
410 | 410 | |
|
411 | 411 | if c.repo_info.stats: |
|
412 | 412 | # this is the revision we ended up on, so add +1 to get the count |
|
413 | 413 | last_rev = c.repo_info.stats.stat_on_revision + 1 |
|
414 | 414 | else: |
|
415 | 415 | last_rev = 0 |
|
416 | 416 | c.stats_revision = last_rev |
|
417 | 417 | |
|
418 | 418 | c.repo_last_rev = repo.count() |
|
419 | 419 | |
|
420 | 420 | if last_rev == 0 or c.repo_last_rev == 0: |
|
421 | 421 | c.stats_percentage = 0 |
|
422 | 422 | else: |
|
423 | 423 | c.stats_percentage = '%.2f' % ((float((last_rev)) / c.repo_last_rev) * 100) |
|
424 | 424 | |
|
425 | 425 | c.active = 'statistics' |
|
426 | 426 | |
|
427 | 427 | return render('admin/repos/repo_edit.mako') |
|
428 | 428 | |
|
429 | 429 | @HasRepoPermissionAllDecorator('repository.admin') |
|
430 | 430 | @auth.CSRFRequired() |
|
431 | 431 | def repo_issuetracker_test(self, repo_name): |
|
432 | 432 | if request.is_xhr: |
|
433 | 433 | return h.urlify_commit_message( |
|
434 | 434 | request.POST.get('test_text', ''), |
|
435 | 435 | repo_name) |
|
436 | 436 | else: |
|
437 | 437 | raise HTTPBadRequest() |
|
438 | 438 | |
|
439 | 439 | @HasRepoPermissionAllDecorator('repository.admin') |
|
440 | 440 | @auth.CSRFRequired() |
|
441 | 441 | def repo_issuetracker_delete(self, repo_name): |
|
442 | 442 | uid = request.POST.get('uid') |
|
443 | 443 | repo_settings = IssueTrackerSettingsModel(repo=repo_name) |
|
444 | 444 | try: |
|
445 | 445 | repo_settings.delete_entries(uid) |
|
446 | 446 | except Exception: |
|
447 | 447 | h.flash(_('Error occurred during deleting issue tracker entry'), |
|
448 | 448 | category='error') |
|
449 | 449 | else: |
|
450 | 450 | h.flash(_('Removed issue tracker entry'), category='success') |
|
451 | 451 | return redirect(url('repo_settings_issuetracker', |
|
452 | 452 | repo_name=repo_name)) |
|
453 | 453 | |
|
454 | 454 | def _update_patterns(self, form, repo_settings): |
|
455 | 455 | for uid in form['delete_patterns']: |
|
456 | 456 | repo_settings.delete_entries(uid) |
|
457 | 457 | |
|
458 | 458 | for pattern in form['patterns']: |
|
459 | 459 | for setting, value, type_ in pattern: |
|
460 | 460 | sett = repo_settings.create_or_update_setting( |
|
461 | 461 | setting, value, type_) |
|
462 | 462 | Session().add(sett) |
|
463 | 463 | |
|
464 | 464 | Session().commit() |
|
465 | 465 | |
|
466 | 466 | @HasRepoPermissionAllDecorator('repository.admin') |
|
467 | 467 | @auth.CSRFRequired() |
|
468 | 468 | def repo_issuetracker_save(self, repo_name): |
|
469 | 469 | # Save inheritance |
|
470 | 470 | repo_settings = IssueTrackerSettingsModel(repo=repo_name) |
|
471 | 471 | inherited = (request.POST.get('inherit_global_issuetracker') |
|
472 | 472 | == "inherited") |
|
473 | 473 | repo_settings.inherit_global_settings = inherited |
|
474 | 474 | Session().commit() |
|
475 | 475 | |
|
476 | 476 | form = IssueTrackerPatternsForm()().to_python(request.POST) |
|
477 | 477 | if form: |
|
478 | 478 | self._update_patterns(form, repo_settings) |
|
479 | 479 | |
|
480 | 480 | h.flash(_('Updated issue tracker entries'), category='success') |
|
481 | 481 | return redirect(url('repo_settings_issuetracker', |
|
482 | 482 | repo_name=repo_name)) |
|
483 | 483 | |
|
484 | 484 | @HasRepoPermissionAllDecorator('repository.admin') |
|
485 | 485 | def repo_issuetracker(self, repo_name): |
|
486 | 486 | """GET /admin/settings/issue-tracker: All items in the collection""" |
|
487 | 487 | c.active = 'issuetracker' |
|
488 | 488 | c.data = 'data' |
|
489 | 489 | c.repo_info = self._load_repo(repo_name) |
|
490 | 490 | |
|
491 | 491 | repo = Repository.get_by_repo_name(repo_name) |
|
492 | 492 | c.settings_model = IssueTrackerSettingsModel(repo=repo) |
|
493 | 493 | c.global_patterns = c.settings_model.get_global_settings() |
|
494 | 494 | c.repo_patterns = c.settings_model.get_repo_settings() |
|
495 | 495 | |
|
496 | 496 | return render('admin/repos/repo_edit.mako') |
|
497 | 497 | |
|
498 | 498 | @HasRepoPermissionAllDecorator('repository.admin') |
|
499 | 499 | def repo_settings_vcs(self, repo_name): |
|
500 | 500 | """GET /{repo_name}/settings/vcs/: All items in the collection""" |
|
501 | 501 | |
|
502 | 502 | model = VcsSettingsModel(repo=repo_name) |
|
503 | 503 | |
|
504 | 504 | c.active = 'vcs' |
|
505 | 505 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
506 | 506 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
507 | 507 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() |
|
508 | 508 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() |
|
509 | 509 | c.repo_info = self._load_repo(repo_name) |
|
510 | 510 | defaults = self._vcs_form_defaults(repo_name) |
|
511 | 511 | c.inherit_global_settings = defaults['inherit_global_settings'] |
|
512 | 512 | c.labs_active = str2bool( |
|
513 | 513 | rhodecode.CONFIG.get('labs_settings_active', 'true')) |
|
514 | 514 | |
|
515 | 515 | return htmlfill.render( |
|
516 | 516 | render('admin/repos/repo_edit.mako'), |
|
517 | 517 | defaults=defaults, |
|
518 | 518 | encoding="UTF-8", |
|
519 | 519 | force_defaults=False) |
|
520 | 520 | |
|
521 | 521 | @HasRepoPermissionAllDecorator('repository.admin') |
|
522 | 522 | @auth.CSRFRequired() |
|
523 | 523 | def repo_settings_vcs_update(self, repo_name): |
|
524 | 524 | """POST /{repo_name}/settings/vcs/: All items in the collection""" |
|
525 | 525 | c.active = 'vcs' |
|
526 | 526 | |
|
527 | 527 | model = VcsSettingsModel(repo=repo_name) |
|
528 | 528 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
529 | 529 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
530 | 530 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() |
|
531 | 531 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() |
|
532 | 532 | c.repo_info = self._load_repo(repo_name) |
|
533 | 533 | defaults = self._vcs_form_defaults(repo_name) |
|
534 | 534 | c.inherit_global_settings = defaults['inherit_global_settings'] |
|
535 | 535 | |
|
536 | 536 | application_form = RepoVcsSettingsForm(repo_name)() |
|
537 | 537 | try: |
|
538 | 538 | form_result = application_form.to_python(dict(request.POST)) |
|
539 | 539 | except formencode.Invalid as errors: |
|
540 | 540 | h.flash( |
|
541 | 541 | _("Some form inputs contain invalid data."), |
|
542 | 542 | category='error') |
|
543 | 543 | return htmlfill.render( |
|
544 | 544 | render('admin/repos/repo_edit.mako'), |
|
545 | 545 | defaults=errors.value, |
|
546 | 546 | errors=errors.error_dict or {}, |
|
547 | 547 | prefix_error=False, |
|
548 | 548 | encoding="UTF-8", |
|
549 | 549 | force_defaults=False |
|
550 | 550 | ) |
|
551 | 551 | |
|
552 | 552 | try: |
|
553 | 553 | inherit_global_settings = form_result['inherit_global_settings'] |
|
554 | 554 | model.create_or_update_repo_settings( |
|
555 | 555 | form_result, inherit_global_settings=inherit_global_settings) |
|
556 | 556 | except Exception: |
|
557 | 557 | log.exception("Exception while updating settings") |
|
558 | 558 | h.flash( |
|
559 | 559 | _('Error occurred during updating repository VCS settings'), |
|
560 | 560 | category='error') |
|
561 | 561 | else: |
|
562 | 562 | Session().commit() |
|
563 | 563 | h.flash(_('Updated VCS settings'), category='success') |
|
564 | 564 | return redirect(url('repo_vcs_settings', repo_name=repo_name)) |
|
565 | 565 | |
|
566 | 566 | return htmlfill.render( |
|
567 | 567 | render('admin/repos/repo_edit.mako'), |
|
568 | 568 | defaults=self._vcs_form_defaults(repo_name), |
|
569 | 569 | encoding="UTF-8", |
|
570 | 570 | force_defaults=False) |
|
571 | 571 | |
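
The validate-then-re-render flow in repo_settings_vcs_update() above is the usual formencode round-trip: run the schema against the POST dict and, on formencode.Invalid, hand the submitted values plus the error dict back to htmlfill. A minimal standalone sketch of that round-trip, using an invented DemoForm schema in place of RepoVcsSettingsForm:

    import formencode
    from formencode import validators

    class DemoForm(formencode.Schema):
        # stand-in schema; the controller uses RepoVcsSettingsForm(repo_name)()
        allow_extra_fields = True
        filter_extra_fields = True
        repo_name = validators.UnicodeString(not_empty=True)

    try:
        form_result = DemoForm().to_python({'repo_name': ''})
    except formencode.Invalid as errors:
        # what the user submitted, and per-field messages fed to htmlfill.render()
        print(errors.value)
        print(errors.error_dict or {})
    else:
        print(form_result)
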
|
572 | 572 | @HasRepoPermissionAllDecorator('repository.admin') |
|
573 | 573 | @auth.CSRFRequired() |
|
574 | 574 | @jsonify |
|
575 | 575 | def repo_delete_svn_pattern(self, repo_name): |
|
576 | 576 | if not request.is_xhr: |
|
577 | 577 | return False |
|
578 | 578 | |
|
579 | 579 | delete_pattern_id = request.POST.get('delete_svn_pattern') |
|
580 | 580 | model = VcsSettingsModel(repo=repo_name) |
|
581 | 581 | try: |
|
582 | 582 | model.delete_repo_svn_pattern(delete_pattern_id) |
|
583 | 583 | except SettingNotFound: |
|
584 | 584 | raise HTTPBadRequest() |
|
585 | 585 | |
|
586 | 586 | Session().commit() |
|
587 | 587 | return True |
|
588 | 588 | |
|
589 | 589 | def _vcs_form_defaults(self, repo_name): |
|
590 | 590 | model = VcsSettingsModel(repo=repo_name) |
|
591 | 591 | global_defaults = model.get_global_settings() |
|
592 | 592 | |
|
593 | 593 | repo_defaults = {} |
|
594 | 594 | repo_defaults.update(global_defaults) |
|
595 | 595 | repo_defaults.update(model.get_repo_settings()) |
|
596 | 596 | |
|
597 | 597 | global_defaults = { |
|
598 | 598 | '{}_inherited'.format(k): global_defaults[k] |
|
599 | 599 | for k in global_defaults} |
|
600 | 600 | |
|
601 | 601 | defaults = { |
|
602 | 602 | 'inherit_global_settings': model.inherit_global_settings |
|
603 | 603 | } |
|
604 | 604 | defaults.update(global_defaults) |
|
605 | 605 | defaults.update(repo_defaults) |
|
606 | 606 | defaults.update({ |
|
607 | 607 | 'new_svn_branch': '', |
|
608 | 608 | 'new_svn_tag': '', |
|
609 | 609 | }) |
|
610 | 610 | return defaults |
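
For reference, the layering done by _vcs_form_defaults() can be pictured with plain dicts; the values below are invented and only the merge order mirrors the method: repo-level settings override global ones, while the global values are also kept under a *_inherited suffix so the form can show what inheritance would give.

    # invented sample values; VcsSettingsModel normally reads these from the database
    global_settings = {'hooks_push': True, 'largefiles_enabled': False}
    repo_settings = {'largefiles_enabled': True}

    repo_defaults = {}
    repo_defaults.update(global_settings)
    repo_defaults.update(repo_settings)          # repo values win

    inherited = {'{}_inherited'.format(k): v for k, v in global_settings.items()}

    defaults = {'inherit_global_settings': False}
    defaults.update(inherited)
    defaults.update(repo_defaults)
    defaults.update({'new_svn_branch': '', 'new_svn_tag': ''})
    print(defaults)
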
@@ -1,290 +1,260 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | changelog controller for rhodecode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from pylons import request, url, session, tmpl_context as c |
|
28 | 28 | from pylons.controllers.util import redirect |
|
29 | 29 | from pylons.i18n.translation import _ |
|
30 | 30 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
31 | 31 | |
|
32 | 32 | import rhodecode.lib.helpers as h |
|
33 | 33 | from rhodecode.lib.auth import ( |
|
34 | 34 | LoginRequired, HasRepoPermissionAnyDecorator, XHRRequired) |
|
35 | 35 | from rhodecode.lib.base import BaseRepoController, render |
|
36 | 36 | from rhodecode.lib.ext_json import json |
|
37 | 37 | from rhodecode.lib.graphmod import _colored, _dagwalker |
|
38 | 38 | from rhodecode.lib.helpers import RepoPage |
|
39 | 39 | from rhodecode.lib.utils2 import safe_int, safe_str |
|
40 | 40 | from rhodecode.lib.vcs.exceptions import ( |
|
41 | 41 | RepositoryError, CommitDoesNotExistError, |
|
42 | 42 | CommitError, NodeDoesNotExistError, EmptyRepositoryError) |
|
43 | 43 | |
|
44 | 44 | log = logging.getLogger(__name__) |
|
45 | 45 | |
|
46 | 46 | DEFAULT_CHANGELOG_SIZE = 20 |
|
47 | 47 | |
|
48 | 48 | |
|
49 | def _load_changelog_summary(): | |
|
50 | p = safe_int(request.GET.get('page'), 1) | |
|
51 | size = safe_int(request.GET.get('size'), 10) | |
|
52 | ||
|
53 | def url_generator(**kw): | |
|
54 | return url('summary_home', | |
|
55 | repo_name=c.rhodecode_db_repo.repo_name, size=size, **kw) | |
|
56 | ||
|
57 | pre_load = ['author', 'branch', 'date', 'message'] | |
|
58 | try: | |
|
59 | collection = c.rhodecode_repo.get_commits(pre_load=pre_load) | |
|
60 | except EmptyRepositoryError: | |
|
61 | collection = c.rhodecode_repo | |
|
62 | ||
|
63 | c.repo_commits = RepoPage( | |
|
64 | collection, page=p, items_per_page=size, url=url_generator) | |
|
65 | page_ids = [x.raw_id for x in c.repo_commits] | |
|
66 | c.comments = c.rhodecode_db_repo.get_comments(page_ids) | |
|
67 | c.statuses = c.rhodecode_db_repo.statuses(page_ids) | |
|
68 | ||
|
69 | ||
|
70 | 49 | class ChangelogController(BaseRepoController): |
|
71 | 50 | |
|
72 | 51 | def __before__(self): |
|
73 | 52 | super(ChangelogController, self).__before__() |
|
74 | 53 | c.affected_files_cut_off = 60 |
|
75 | 54 | |
|
76 | 55 | def __get_commit_or_redirect( |
|
77 | 56 | self, commit_id, repo, redirect_after=True, partial=False): |
|
78 | 57 | """ |
|
79 | 58 | This is a safe way to get a commit. If an error occurs it |
|
80 | 59 | redirects to a commit with a proper message. If partial is set |
|
81 | 60 | then it does not do redirect raise and throws an exception instead. |
|
82 | 61 | |
|
83 | 62 | :param commit_id: commit to fetch |
|
84 | 63 | :param repo: repo instance |
|
85 | 64 | """ |
|
86 | 65 | try: |
|
87 | 66 | return c.rhodecode_repo.get_commit(commit_id) |
|
88 | 67 | except EmptyRepositoryError: |
|
89 | 68 | if not redirect_after: |
|
90 | 69 | return None |
|
91 | 70 | h.flash(h.literal(_('There are no commits yet')), |
|
92 | 71 | category='warning') |
|
93 | 72 | redirect(url('changelog_home', repo_name=repo.repo_name)) |
|
94 | 73 | except RepositoryError as e: |
|
95 | 74 | msg = safe_str(e) |
|
96 | 75 | log.exception(msg) |
|
97 | 76 | h.flash(msg, category='warning') |
|
98 | 77 | if not partial: |
|
99 | 78 | redirect(h.url('changelog_home', repo_name=repo.repo_name)) |
|
100 | 79 | raise HTTPBadRequest() |
|
101 | 80 | |
|
102 | 81 | def _graph(self, repo, commits, prev_data=None, next_data=None): |
|
103 | 82 | """ |
|
104 | 83 | Generates a DAG graph for repo |
|
105 | 84 | |
|
106 | 85 | :param repo: repo instance |
|
107 | 86 | :param commits: list of commits |
|
108 | 87 | """ |
|
109 | 88 | if not commits: |
|
110 | 89 | return json.dumps([]) |
|
111 | 90 | |
|
112 | 91 | def serialize(commit, parents=True): |
|
113 | 92 | data = dict( |
|
114 | 93 | raw_id=commit.raw_id, |
|
115 | 94 | idx=commit.idx, |
|
116 | 95 | branch=commit.branch, |
|
117 | 96 | ) |
|
118 | 97 | if parents: |
|
119 | 98 | data['parents'] = [ |
|
120 | 99 | serialize(x, parents=False) for x in commit.parents] |
|
121 | 100 | return data |
|
122 | 101 | |
|
123 | 102 | prev_data = prev_data or [] |
|
124 | 103 | next_data = next_data or [] |
|
125 | 104 | |
|
126 | 105 | current = [serialize(x) for x in commits] |
|
127 | 106 | commits = prev_data + current + next_data |
|
128 | 107 | |
|
129 | 108 | dag = _dagwalker(repo, commits) |
|
130 | 109 | |
|
131 | 110 | data = [[commit_id, vtx, edges, branch] |
|
132 | 111 | for commit_id, vtx, edges, branch in _colored(dag)] |
|
133 | 112 | return json.dumps(data), json.dumps(current) |
|
134 | 113 | |
|
135 | 114 | def _check_if_valid_branch(self, branch_name, repo_name, f_path): |
|
136 | 115 | if branch_name not in c.rhodecode_repo.branches_all: |
|
137 | 116 | h.flash('Branch {} is not found.'.format(branch_name), |
|
138 | 117 | category='warning') |
|
139 | 118 | redirect(url('changelog_file_home', repo_name=repo_name, |
|
140 | 119 | revision=branch_name, f_path=f_path or '')) |
|
141 | 120 | |
|
142 | 121 | def _load_changelog_data(self, collection, page, chunk_size, branch_name=None, dynamic=False): |
|
143 | 122 | c.total_cs = len(collection) |
|
144 | 123 | c.showing_commits = min(chunk_size, c.total_cs) |
|
145 | 124 | c.pagination = RepoPage(collection, page=page, item_count=c.total_cs, |
|
146 | 125 | items_per_page=chunk_size, branch=branch_name) |
|
147 | 126 | |
|
148 | 127 | c.next_page = c.pagination.next_page |
|
149 | 128 | c.prev_page = c.pagination.previous_page |
|
150 | 129 | |
|
151 | 130 | if dynamic: |
|
152 | 131 | if request.GET.get('chunk') != 'next': |
|
153 | 132 | c.next_page = None |
|
154 | 133 | if request.GET.get('chunk') != 'prev': |
|
155 | 134 | c.prev_page = None |
|
156 | 135 | |
|
157 | 136 | page_commit_ids = [x.raw_id for x in c.pagination] |
|
158 | 137 | c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids) |
|
159 | 138 | c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids) |
|
160 | 139 | |
|
161 | 140 | @LoginRequired() |
|
162 | 141 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
163 | 142 | 'repository.admin') |
|
164 | 143 | def index(self, repo_name, revision=None, f_path=None): |
|
165 | 144 | commit_id = revision |
|
166 | 145 | chunk_size = 20 |
|
167 | 146 | |
|
168 | 147 | c.branch_name = branch_name = request.GET.get('branch', None) |
|
169 | 148 | c.book_name = book_name = request.GET.get('bookmark', None) |
|
170 | 149 | hist_limit = safe_int(request.GET.get('limit')) or None |
|
171 | 150 | |
|
172 | 151 | p = safe_int(request.GET.get('page', 1), 1) |
|
173 | 152 | |
|
174 | 153 | c.selected_name = branch_name or book_name |
|
175 | 154 | if not commit_id and branch_name: |
|
176 | 155 | self._check_if_valid_branch(branch_name, repo_name, f_path) |
|
177 | 156 | |
|
178 | 157 | c.changelog_for_path = f_path |
|
179 | 158 | pre_load = ['author', 'branch', 'date', 'message', 'parents'] |
|
180 | 159 | commit_ids = [] |
|
181 | 160 | |
|
182 | 161 | try: |
|
183 | 162 | if f_path: |
|
184 | 163 | log.debug('generating changelog for path %s', f_path) |
|
185 | 164 | # get the history for the file ! |
|
186 | 165 | base_commit = c.rhodecode_repo.get_commit(revision) |
|
187 | 166 | try: |
|
188 | 167 | collection = base_commit.get_file_history( |
|
189 | 168 | f_path, limit=hist_limit, pre_load=pre_load) |
|
190 | 169 | if (collection |
|
191 | 170 | and request.environ.get('HTTP_X_PARTIAL_XHR')): |
|
192 | 171 | # for ajax call we remove first one since we're looking |
|
193 | 172 | # at it right now in the context of a file commit |
|
194 | 173 | collection.pop(0) |
|
195 | 174 | except (NodeDoesNotExistError, CommitError): |
|
196 | 175 | # this node is not present at tip! |
|
197 | 176 | try: |
|
198 | 177 | commit = self.__get_commit_or_redirect( |
|
199 | 178 | commit_id, repo_name) |
|
200 | 179 | collection = commit.get_file_history(f_path) |
|
201 | 180 | except RepositoryError as e: |
|
202 | 181 | h.flash(safe_str(e), category='warning') |
|
203 | 182 | redirect(h.url('changelog_home', repo_name=repo_name)) |
|
204 | 183 | collection = list(reversed(collection)) |
|
205 | 184 | else: |
|
206 | 185 | collection = c.rhodecode_repo.get_commits( |
|
207 | 186 | branch_name=branch_name, pre_load=pre_load) |
|
208 | 187 | |
|
209 | 188 | self._load_changelog_data( |
|
210 | 189 | collection, p, chunk_size, c.branch_name, dynamic=f_path) |
|
211 | 190 | |
|
212 | 191 | except EmptyRepositoryError as e: |
|
213 | 192 | h.flash(safe_str(e), category='warning') |
|
214 | return redirect( |
|
|
193 | return redirect(h.route_path('repo_summary', repo_name=repo_name)) | |
|
215 | 194 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
216 | 195 | msg = safe_str(e) |
|
217 | 196 | log.exception(msg) |
|
218 | 197 | h.flash(msg, category='error') |
|
219 | 198 | return redirect(url('changelog_home', repo_name=repo_name)) |
|
220 | 199 | |
|
221 | 200 | if (request.environ.get('HTTP_X_PARTIAL_XHR') |
|
222 | 201 | or request.environ.get('HTTP_X_PJAX')): |
|
223 | 202 | # loading from ajax, we don't want the first result, it's popped |
|
224 | 203 | return render('changelog/changelog_file_history.mako') |
|
225 | 204 | |
|
226 | 205 | if not f_path: |
|
227 | 206 | commit_ids = c.pagination |
|
228 | 207 | |
|
229 | 208 | c.graph_data, c.graph_commits = self._graph( |
|
230 | 209 | c.rhodecode_repo, commit_ids) |
|
231 | 210 | |
|
232 | 211 | return render('changelog/changelog.mako') |
|
233 | 212 | |
|
234 | 213 | @LoginRequired() |
|
235 | 214 | @XHRRequired() |
|
236 | 215 | @HasRepoPermissionAnyDecorator( |
|
237 | 216 | 'repository.read', 'repository.write', 'repository.admin') |
|
238 | 217 | def changelog_elements(self, repo_name): |
|
239 | 218 | commit_id = None |
|
240 | 219 | chunk_size = 20 |
|
241 | 220 | |
|
242 | 221 | def wrap_for_error(err): |
|
243 | 222 | return '<tr><td colspan="9" class="alert alert-error">ERROR: {}</td></tr>'.format(err) |
|
244 | 223 | |
|
245 | 224 | c.branch_name = branch_name = request.GET.get('branch', None) |
|
246 | 225 | c.book_name = book_name = request.GET.get('bookmark', None) |
|
247 | 226 | |
|
248 | 227 | p = safe_int(request.GET.get('page', 1), 1) |
|
249 | 228 | |
|
250 | 229 | c.selected_name = branch_name or book_name |
|
251 | 230 | if not commit_id and branch_name: |
|
252 | 231 | if branch_name not in c.rhodecode_repo.branches_all: |
|
253 | 232 | return wrap_for_error( |
|
254 | 233 | safe_str('Missing branch: {}'.format(branch_name))) |
|
255 | 234 | |
|
256 | 235 | pre_load = ['author', 'branch', 'date', 'message', 'parents'] |
|
257 | 236 | collection = c.rhodecode_repo.get_commits( |
|
258 | 237 | branch_name=branch_name, pre_load=pre_load) |
|
259 | 238 | |
|
260 | 239 | try: |
|
261 | 240 | self._load_changelog_data(collection, p, chunk_size, dynamic=True) |
|
262 | 241 | except EmptyRepositoryError as e: |
|
263 | 242 | return wrap_for_error(safe_str(e)) |
|
264 | 243 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
265 | 244 | log.exception('Failed to fetch commits') |
|
266 | 245 | return wrap_for_error(safe_str(e)) |
|
267 | 246 | |
|
268 | 247 | prev_data = None |
|
269 | 248 | next_data = None |
|
270 | 249 | |
|
271 | 250 | prev_graph = json.loads(request.POST.get('graph', '')) |
|
272 | 251 | |
|
273 | 252 | if request.GET.get('chunk') == 'prev': |
|
274 | 253 | next_data = prev_graph |
|
275 | 254 | elif request.GET.get('chunk') == 'next': |
|
276 | 255 | prev_data = prev_graph |
|
277 | 256 | |
|
278 | 257 | c.graph_data, c.graph_commits = self._graph( |
|
279 | 258 | c.rhodecode_repo, c.pagination, |
|
280 | 259 | prev_data=prev_data, next_data=next_data) |
|
281 | 260 | return render('changelog/changelog_elements.mako') |
|
282 | ||
|
283 | @LoginRequired() | |
|
284 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
|
285 | 'repository.admin') | |
|
286 | def changelog_summary(self, repo_name): | |
|
287 | if request.environ.get('HTTP_X_PJAX'): | |
|
288 | _load_changelog_summary() | |
|
289 | return render('changelog/changelog_summary_data.mako') | |
|
290 | raise HTTPNotFound() |
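
The JSON handed to the changelog graph by _graph()/serialize() is a flat list of small dicts, with parents nested one level deep. A toy version with hand-written commit data (the real code walks vcs commit objects):

    import json

    def serialize(commit, parents=True):
        data = dict(raw_id=commit['raw_id'], idx=commit['idx'], branch=commit['branch'])
        if parents:
            # parents are serialized without their own parents
            data['parents'] = [serialize(p, parents=False) for p in commit['parents']]
        return data

    parent = {'raw_id': 'aaa111', 'idx': 1, 'branch': 'default', 'parents': []}
    tip = {'raw_id': 'bbb222', 'idx': 2, 'branch': 'default', 'parents': [parent]}
    print(json.dumps([serialize(tip)], indent=2))
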
@@ -1,282 +1,282 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Compare controller for showing differences between two commits/refs/tags etc. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from webob.exc import HTTPBadRequest |
|
28 | 28 | from pylons import request, tmpl_context as c, url |
|
29 | 29 | from pylons.controllers.util import redirect |
|
30 | 30 | from pylons.i18n.translation import _ |
|
31 | 31 | |
|
32 | 32 | from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name |
|
33 | 33 | from rhodecode.lib import helpers as h |
|
34 | 34 | from rhodecode.lib import diffs, codeblocks |
|
35 | 35 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
36 | 36 | from rhodecode.lib.base import BaseRepoController, render |
|
37 | 37 | from rhodecode.lib.utils import safe_str |
|
38 | 38 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
39 | 39 | from rhodecode.lib.vcs.exceptions import ( |
|
40 | 40 | EmptyRepositoryError, RepositoryError, RepositoryRequirementError, |
|
41 | 41 | NodeDoesNotExistError) |
|
42 | 42 | from rhodecode.model.db import Repository, ChangesetStatus |
|
43 | 43 | |
|
44 | 44 | log = logging.getLogger(__name__) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | class CompareController(BaseRepoController): |
|
48 | 48 | |
|
49 | 49 | def __before__(self): |
|
50 | 50 | super(CompareController, self).__before__() |
|
51 | 51 | |
|
52 | 52 | def _get_commit_or_redirect( |
|
53 | 53 | self, ref, ref_type, repo, redirect_after=True, partial=False): |
|
54 | 54 | """ |
|
55 | 55 | This is a safe way to get a commit. If an error occurs it |
|
56 | 56 | redirects to a commit with a proper message. If partial is set |
|
57 | 57 | then it does not redirect but raises an exception instead. |
|
58 | 58 | """ |
|
59 | 59 | try: |
|
60 | 60 | return get_commit_from_ref_name(repo, safe_str(ref), ref_type) |
|
61 | 61 | except EmptyRepositoryError: |
|
62 | 62 | if not redirect_after: |
|
63 | 63 | return repo.scm_instance().EMPTY_COMMIT |
|
64 | 64 | h.flash(h.literal(_('There are no commits yet')), |
|
65 | 65 | category='warning') |
|
66 | redirect( |
|
|
66 | redirect(h.route_path('repo_summary', repo_name=repo.repo_name)) | |
|
67 | 67 | |
|
68 | 68 | except RepositoryError as e: |
|
69 | 69 | msg = safe_str(e) |
|
70 | 70 | log.exception(msg) |
|
71 | 71 | h.flash(msg, category='warning') |
|
72 | 72 | if not partial: |
|
73 | redirect(h. |
|
|
73 | redirect(h.route_path('repo_summary', repo_name=repo.repo_name)) | |
|
74 | 74 | raise HTTPBadRequest() |
|
75 | 75 | |
|
76 | 76 | @LoginRequired() |
|
77 | 77 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
78 | 78 | 'repository.admin') |
|
79 | 79 | def index(self, repo_name): |
|
80 | 80 | c.compare_home = True |
|
81 | 81 | c.commit_ranges = [] |
|
82 | 82 | c.collapse_all_commits = False |
|
83 | 83 | c.diffset = None |
|
84 | 84 | c.limited_diff = False |
|
85 | 85 | source_repo = c.rhodecode_db_repo.repo_name |
|
86 | 86 | target_repo = request.GET.get('target_repo', source_repo) |
|
87 | 87 | c.source_repo = Repository.get_by_repo_name(source_repo) |
|
88 | 88 | c.target_repo = Repository.get_by_repo_name(target_repo) |
|
89 | 89 | c.source_ref = c.target_ref = _('Select commit') |
|
90 | 90 | c.source_ref_type = "" |
|
91 | 91 | c.target_ref_type = "" |
|
92 | 92 | c.commit_statuses = ChangesetStatus.STATUSES |
|
93 | 93 | c.preview_mode = False |
|
94 | 94 | c.file_path = None |
|
95 | 95 | return render('compare/compare_diff.mako') |
|
96 | 96 | |
|
97 | 97 | @LoginRequired() |
|
98 | 98 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
99 | 99 | 'repository.admin') |
|
100 | 100 | def compare(self, repo_name, source_ref_type, source_ref, |
|
101 | 101 | target_ref_type, target_ref): |
|
102 | 102 | # source_ref will be evaluated in source_repo |
|
103 | 103 | source_repo_name = c.rhodecode_db_repo.repo_name |
|
104 | 104 | source_path, source_id = parse_path_ref(source_ref) |
|
105 | 105 | |
|
106 | 106 | # target_ref will be evaluated in target_repo |
|
107 | 107 | target_repo_name = request.GET.get('target_repo', source_repo_name) |
|
108 | 108 | target_path, target_id = parse_path_ref( |
|
109 | 109 | target_ref, default_path=request.GET.get('f_path', '')) |
|
110 | 110 | |
|
111 | 111 | c.file_path = target_path |
|
112 | 112 | c.commit_statuses = ChangesetStatus.STATUSES |
|
113 | 113 | |
|
114 | 114 | # if merge is True |
|
115 | 115 | # Show what changes since the shared ancestor commit of target/source |
|
116 | 116 | # the source would get if it was merged with target. Only commits |
|
117 | 117 | # which are in target but not in source will be shown. |
|
118 | 118 | merge = str2bool(request.GET.get('merge')) |
|
119 | 119 | # if merge is False |
|
120 | 120 | # Show a raw diff of source/target refs even if no ancestor exists |
|
121 | 121 | |
|
122 | 122 | # c.fulldiff disables cut_off_limit |
|
123 | 123 | c.fulldiff = str2bool(request.GET.get('fulldiff')) |
|
124 | 124 | |
|
125 | 125 | # if partial, returns just compare_commits.html (commits log) |
|
126 | 126 | partial = request.is_xhr |
|
127 | 127 | |
|
128 | 128 | # swap url for compare_diff page |
|
129 | 129 | c.swap_url = h.url( |
|
130 | 130 | 'compare_url', |
|
131 | 131 | repo_name=target_repo_name, |
|
132 | 132 | source_ref_type=target_ref_type, |
|
133 | 133 | source_ref=target_ref, |
|
134 | 134 | target_repo=source_repo_name, |
|
135 | 135 | target_ref_type=source_ref_type, |
|
136 | 136 | target_ref=source_ref, |
|
137 | 137 | merge=merge and '1' or '', |
|
138 | 138 | f_path=target_path) |
|
139 | 139 | |
|
140 | 140 | source_repo = Repository.get_by_repo_name(source_repo_name) |
|
141 | 141 | target_repo = Repository.get_by_repo_name(target_repo_name) |
|
142 | 142 | |
|
143 | 143 | if source_repo is None: |
|
144 | 144 | msg = _('Could not find the original repo: %(repo)s') % { |
|
145 | 145 | 'repo': source_repo} |
|
146 | 146 | |
|
147 | 147 | log.error(msg) |
|
148 | 148 | h.flash(msg, category='error') |
|
149 | 149 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
150 | 150 | |
|
151 | 151 | if target_repo is None: |
|
152 | 152 | msg = _('Could not find the other repo: %(repo)s') % { |
|
153 | 153 | 'repo': target_repo_name} |
|
154 | 154 | log.error(msg) |
|
155 | 155 | h.flash(msg, category='error') |
|
156 | 156 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
157 | 157 | |
|
158 | 158 | source_scm = source_repo.scm_instance() |
|
159 | 159 | target_scm = target_repo.scm_instance() |
|
160 | 160 | |
|
161 | 161 | source_alias = source_scm.alias |
|
162 | 162 | target_alias = target_scm.alias |
|
163 | 163 | if source_alias != target_alias: |
|
164 | 164 | msg = _('The comparison of two different kinds of remote repos ' |
|
165 | 165 | 'is not available') |
|
166 | 166 | log.error(msg) |
|
167 | 167 | h.flash(msg, category='error') |
|
168 | 168 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
169 | 169 | |
|
170 | 170 | source_commit = self._get_commit_or_redirect( |
|
171 | 171 | ref=source_id, ref_type=source_ref_type, repo=source_repo, |
|
172 | 172 | partial=partial) |
|
173 | 173 | target_commit = self._get_commit_or_redirect( |
|
174 | 174 | ref=target_id, ref_type=target_ref_type, repo=target_repo, |
|
175 | 175 | partial=partial) |
|
176 | 176 | |
|
177 | 177 | c.compare_home = False |
|
178 | 178 | c.source_repo = source_repo |
|
179 | 179 | c.target_repo = target_repo |
|
180 | 180 | c.source_ref = source_ref |
|
181 | 181 | c.target_ref = target_ref |
|
182 | 182 | c.source_ref_type = source_ref_type |
|
183 | 183 | c.target_ref_type = target_ref_type |
|
184 | 184 | |
|
185 | 185 | pre_load = ["author", "branch", "date", "message"] |
|
186 | 186 | c.ancestor = None |
|
187 | 187 | |
|
188 | 188 | if c.file_path: |
|
189 | 189 | if source_commit == target_commit: |
|
190 | 190 | c.commit_ranges = [] |
|
191 | 191 | else: |
|
192 | 192 | c.commit_ranges = [target_commit] |
|
193 | 193 | else: |
|
194 | 194 | try: |
|
195 | 195 | c.commit_ranges = source_scm.compare( |
|
196 | 196 | source_commit.raw_id, target_commit.raw_id, |
|
197 | 197 | target_scm, merge, pre_load=pre_load) |
|
198 | 198 | if merge: |
|
199 | 199 | c.ancestor = source_scm.get_common_ancestor( |
|
200 | 200 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
201 | 201 | except RepositoryRequirementError: |
|
202 | 202 | msg = _('Could not compare repos with different ' |
|
203 | 203 | 'large file settings') |
|
204 | 204 | log.error(msg) |
|
205 | 205 | if partial: |
|
206 | 206 | return msg |
|
207 | 207 | h.flash(msg, category='error') |
|
208 | 208 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
209 | 209 | |
|
210 | 210 | c.statuses = c.rhodecode_db_repo.statuses( |
|
211 | 211 | [x.raw_id for x in c.commit_ranges]) |
|
212 | 212 | |
|
213 | 213 | # auto collapse if we have more than limit |
|
214 | 214 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
215 | 215 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
216 | 216 | |
|
217 | 217 | if partial: # for PR ajax commits loader |
|
218 | 218 | if not c.ancestor: |
|
219 | 219 | return '' # cannot merge if there is no ancestor |
|
220 | 220 | return render('compare/compare_commits.mako') |
|
221 | 221 | |
|
222 | 222 | if c.ancestor: |
|
223 | 223 | # case we want a simple diff without incoming commits, |
|
224 | 224 | # previewing what will be merged. |
|
225 | 225 | # Make the diff on target repo (which is known to have target_ref) |
|
226 | 226 | log.debug('Using ancestor %s as source_ref instead of %s' |
|
227 | 227 | % (c.ancestor, source_ref)) |
|
228 | 228 | source_repo = target_repo |
|
229 | 229 | source_commit = target_repo.get_commit(commit_id=c.ancestor) |
|
230 | 230 | |
|
231 | 231 | # diff_limit will cut off the whole diff if the limit is applied |
|
232 | 232 | # otherwise it will just hide the big files from the front-end |
|
233 | 233 | diff_limit = self.cut_off_limit_diff |
|
234 | 234 | file_limit = self.cut_off_limit_file |
|
235 | 235 | |
|
236 | 236 | log.debug('calculating diff between ' |
|
237 | 237 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
238 | 238 | source_commit, target_commit, |
|
239 | 239 | safe_unicode(source_repo.scm_instance().path)) |
|
240 | 240 | |
|
241 | 241 | if source_commit.repository != target_commit.repository: |
|
242 | 242 | msg = _( |
|
243 | 243 | "Repositories unrelated. " |
|
244 | 244 | "Cannot compare commit %(commit1)s from repository %(repo1)s " |
|
245 | 245 | "with commit %(commit2)s from repository %(repo2)s.") % { |
|
246 | 246 | 'commit1': h.show_id(source_commit), |
|
247 | 247 | 'repo1': source_repo.repo_name, |
|
248 | 248 | 'commit2': h.show_id(target_commit), |
|
249 | 249 | 'repo2': target_repo.repo_name, |
|
250 | 250 | } |
|
251 | 251 | h.flash(msg, category='error') |
|
252 | 252 | raise HTTPBadRequest() |
|
253 | 253 | |
|
254 | 254 | txtdiff = source_repo.scm_instance().get_diff( |
|
255 | 255 | commit1=source_commit, commit2=target_commit, |
|
256 | 256 | path=target_path, path1=source_path) |
|
257 | 257 | |
|
258 | 258 | diff_processor = diffs.DiffProcessor( |
|
259 | 259 | txtdiff, format='newdiff', diff_limit=diff_limit, |
|
260 | 260 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
261 | 261 | _parsed = diff_processor.prepare() |
|
262 | 262 | |
|
263 | 263 | def _node_getter(commit): |
|
264 | 264 | """ Returns a function that returns a node for a commit or None """ |
|
265 | 265 | def get_node(fname): |
|
266 | 266 | try: |
|
267 | 267 | return commit.get_node(fname) |
|
268 | 268 | except NodeDoesNotExistError: |
|
269 | 269 | return None |
|
270 | 270 | return get_node |
|
271 | 271 | |
|
272 | 272 | c.diffset = codeblocks.DiffSet( |
|
273 | 273 | repo_name=source_repo.repo_name, |
|
274 | 274 | source_node_getter=_node_getter(source_commit), |
|
275 | 275 | target_node_getter=_node_getter(target_commit), |
|
276 | 276 | ).render_patchset(_parsed, source_ref, target_ref) |
|
277 | 277 | |
|
278 | 278 | c.preview_mode = merge |
|
279 | 279 | c.source_commit = source_commit |
|
280 | 280 | c.target_commit = target_commit |
|
281 | 281 | |
|
282 | 282 | return render('compare/compare_diff.mako') |
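
One detail of compare() worth calling out: when a merge preview is requested and a common ancestor exists, the left side of the diff is moved from the source ref to that ancestor, so only the incoming commits show up. A rough sketch with stand-in objects (not the real vcs backend API):

    class FakeRepo(object):
        # stand-in for the target repository's scm wrapper
        def get_commit(self, commit_id):
            return 'commit:%s' % commit_id

    def pick_diff_source(source_commit, ancestor_id, target_repo):
        if ancestor_id:
            # diff from the shared ancestor instead of the raw source ref
            return target_repo.get_commit(commit_id=ancestor_id)
        return source_commit

    print(pick_diff_source('commit:abc123', 'def456', FakeRepo()))
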
@@ -1,180 +1,179 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Feed controller for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | import pytz |
|
28 | 28 | from pylons import url, response, tmpl_context as c |
|
29 | 29 | from pylons.i18n.translation import _ |
|
30 | 30 | |
|
31 | 31 | from beaker.cache import cache_region |
|
32 | 32 | from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed |
|
33 | 33 | |
|
34 | 34 | from rhodecode.model.db import CacheKey, UserApiKeys |
|
35 | 35 | from rhodecode.lib import helpers as h |
|
36 | 36 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
37 | 37 | from rhodecode.lib.base import BaseRepoController |
|
38 | 38 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer |
|
39 | 39 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
40 | 40 | from rhodecode.lib.utils import PartialRenderer |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class FeedController(BaseRepoController): |
|
46 | 46 | |
|
47 | 47 | def _get_config(self): |
|
48 | 48 | import rhodecode |
|
49 | 49 | config = rhodecode.CONFIG |
|
50 | 50 | |
|
51 | 51 | return { |
|
52 | 52 | 'language': 'en-us', |
|
53 | 53 | 'feed_ttl': '5', # TTL of feed, |
|
54 | 54 | 'feed_include_diff': |
|
55 | 55 | str2bool(config.get('rss_include_diff', False)), |
|
56 | 56 | 'feed_items_per_page': |
|
57 | 57 | safe_int(config.get('rss_items_per_page', 20)), |
|
58 | 58 | 'feed_diff_limit': |
|
59 | 59 | # we need to protect from parsing huge diffs here other way |
|
60 | 60 | # we can kill the server |
|
61 | 61 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), |
|
62 | 62 | } |
|
63 | 63 | |
|
64 | 64 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
65 | 65 | def __before__(self): |
|
66 | 66 | super(FeedController, self).__before__() |
|
67 | 67 | config = self._get_config() |
|
68 | 68 | # common values for feeds |
|
69 | 69 | self.description = _('Changes on %s repository') |
|
70 | 70 | self.title = self.title = _('%s %s feed') % (c.rhodecode_name, '%s') |
|
71 | 71 | self.language = config["language"] |
|
72 | 72 | self.ttl = config["feed_ttl"] |
|
73 | 73 | self.feed_include_diff = config['feed_include_diff'] |
|
74 | 74 | self.feed_diff_limit = config['feed_diff_limit'] |
|
75 | 75 | self.feed_items_per_page = config['feed_items_per_page'] |
|
76 | 76 | |
|
77 | 77 | def __changes(self, commit): |
|
78 | 78 | diff_processor = DiffProcessor( |
|
79 | 79 | commit.diff(), diff_limit=self.feed_diff_limit) |
|
80 | 80 | _parsed = diff_processor.prepare(inline_diff=False) |
|
81 | 81 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
82 | 82 | |
|
83 | 83 | return _parsed, limited_diff |
|
84 | 84 | |
|
85 | 85 | def _get_title(self, commit): |
|
86 | 86 | return h.shorter(commit.message, 160) |
|
87 | 87 | |
|
88 | 88 | def _get_description(self, commit): |
|
89 | 89 | _renderer = PartialRenderer('feed/atom_feed_entry.mako') |
|
90 | 90 | parsed_diff, limited_diff = self.__changes(commit) |
|
91 | 91 | return _renderer( |
|
92 | 92 | 'body', |
|
93 | 93 | commit=commit, |
|
94 | 94 | parsed_diff=parsed_diff, |
|
95 | 95 | limited_diff=limited_diff, |
|
96 | 96 | feed_include_diff=self.feed_include_diff, |
|
97 | 97 | ) |
|
98 | 98 | |
|
99 | 99 | def _set_timezone(self, date, tzinfo=pytz.utc): |
|
100 | 100 | if not getattr(date, "tzinfo", None): |
|
101 | 101 | date.replace(tzinfo=tzinfo) |
|
102 | 102 | return date |
|
103 | 103 | |
|
104 | 104 | def _get_commits(self): |
|
105 | 105 | return list(c.rhodecode_repo[-self.feed_items_per_page:]) |
|
106 | 106 | |
|
107 | 107 | @HasRepoPermissionAnyDecorator( |
|
108 | 108 | 'repository.read', 'repository.write', 'repository.admin') |
|
109 | 109 | def atom(self, repo_name): |
|
110 | 110 | """Produce an atom-1.0 feed via feedgenerator module""" |
|
111 | 111 | |
|
112 | 112 | @cache_region('long_term') |
|
113 | 113 | def _generate_feed(cache_key): |
|
114 | 114 | feed = Atom1Feed( |
|
115 | 115 | title=self.title % repo_name, |
|
116 | link=url('summary |
|
|
116 | link=h.route_url('repo_summary', repo_name=repo_name), | |
|
117 | 117 | description=self.description % repo_name, |
|
118 | 118 | language=self.language, |
|
119 | 119 | ttl=self.ttl |
|
120 | 120 | ) |
|
121 | 121 | |
|
122 | 122 | for commit in reversed(self._get_commits()): |
|
123 | 123 | date = self._set_timezone(commit.date) |
|
124 | 124 | feed.add_item( |
|
125 | 125 | title=self._get_title(commit), |
|
126 | 126 | author_name=commit.author, |
|
127 | 127 | description=self._get_description(commit), |
|
128 | 128 | link=url('changeset_home', repo_name=repo_name, |
|
129 | 129 | revision=commit.raw_id, qualified=True), |
|
130 | 130 | pubdate=date,) |
|
131 | 131 | |
|
132 | 132 | return feed.mime_type, feed.writeString('utf-8') |
|
133 | 133 | |
|
134 | 134 | invalidator_context = CacheKey.repo_context_cache( |
|
135 | 135 | _generate_feed, repo_name, CacheKey.CACHE_TYPE_ATOM) |
|
136 | 136 | |
|
137 | 137 | with invalidator_context as context: |
|
138 | 138 | context.invalidate() |
|
139 | 139 | mime_type, feed = context.compute() |
|
140 | 140 | |
|
141 | 141 | response.content_type = mime_type |
|
142 | 142 | return feed |
|
143 | 143 | |
|
144 | 144 | @HasRepoPermissionAnyDecorator( |
|
145 | 145 | 'repository.read', 'repository.write', 'repository.admin') |
|
146 | 146 | def rss(self, repo_name): |
|
147 | 147 | """Produce an rss2 feed via feedgenerator module""" |
|
148 | 148 | |
|
149 | 149 | @cache_region('long_term') |
|
150 | 150 | def _generate_feed(cache_key): |
|
151 | 151 | feed = Rss201rev2Feed( |
|
152 | 152 | title=self.title % repo_name, |
|
153 | link=url('summary |
|
|
154 | qualified=True), | |
|
153 | link=h.route_url('repo_summary', repo_name=repo_name), | |
|
155 | 154 | description=self.description % repo_name, |
|
156 | 155 | language=self.language, |
|
157 | 156 | ttl=self.ttl |
|
158 | 157 | ) |
|
159 | 158 | |
|
160 | 159 | for commit in reversed(self._get_commits()): |
|
161 | 160 | date = self._set_timezone(commit.date) |
|
162 | 161 | feed.add_item( |
|
163 | 162 | title=self._get_title(commit), |
|
164 | 163 | author_name=commit.author, |
|
165 | 164 | description=self._get_description(commit), |
|
166 | 165 | link=url('changeset_home', repo_name=repo_name, |
|
167 | 166 | revision=commit.raw_id, qualified=True), |
|
168 | 167 | pubdate=date,) |
|
169 | 168 | |
|
170 | 169 | return feed.mime_type, feed.writeString('utf-8') |
|
171 | 170 | |
|
172 | 171 | invalidator_context = CacheKey.repo_context_cache( |
|
173 | 172 | _generate_feed, repo_name, CacheKey.CACHE_TYPE_RSS) |
|
174 | 173 | |
|
175 | 174 | with invalidator_context as context: |
|
176 | 175 | context.invalidate() |
|
177 | 176 | mime_type, feed = context.compute() |
|
178 | 177 | |
|
179 | 178 | response.content_type = mime_type |
|
180 | 179 | return feed |
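
Both feed builders above share the same feedgenerator pattern; here is a self-contained version with sample data instead of real commits (the URLs and commit fields are placeholders):

    from datetime import datetime
    from webhelpers.feedgenerator import Atom1Feed

    feed = Atom1Feed(
        title='example-repo feed',
        link='https://example.invalid/example-repo',      # placeholder link
        description='Changes on example-repo repository',
        language='en-us',
        ttl='5')

    feed.add_item(
        title='Initial commit',
        author_name='dev',
        description='<pre>optional diff body</pre>',
        link='https://example.invalid/example-repo/changeset/deadbeef',
        pubdate=datetime.utcnow())

    print(feed.mime_type)
    print(feed.writeString('utf-8'))
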
@@ -1,1110 +1,1110 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Files controller for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import itertools |
|
26 | 26 | import logging |
|
27 | 27 | import os |
|
28 | 28 | import shutil |
|
29 | 29 | import tempfile |
|
30 | 30 | |
|
31 | 31 | from pylons import request, response, tmpl_context as c, url |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
35 | 35 | |
|
36 | 36 | from rhodecode.controllers.utils import parse_path_ref |
|
37 | 37 | from rhodecode.lib import diffs, helpers as h, caches |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.codeblocks import ( |
|
40 | 40 | filenode_as_lines_tokens, filenode_as_annotated_lines_tokens) |
|
41 | 41 | from rhodecode.lib.utils import jsonify, action_logger |
|
42 | 42 | from rhodecode.lib.utils2 import ( |
|
43 | 43 | convert_line_endings, detect_mode, safe_str, str2bool) |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired) |
|
46 | 46 | from rhodecode.lib.base import BaseRepoController, render |
|
47 | 47 | from rhodecode.lib.vcs import path as vcspath |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
49 | 49 | from rhodecode.lib.vcs.conf import settings |
|
50 | 50 | from rhodecode.lib.vcs.exceptions import ( |
|
51 | 51 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, |
|
52 | 52 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, |
|
53 | 53 | NodeDoesNotExistError, CommitError, NodeError) |
|
54 | 54 | from rhodecode.lib.vcs.nodes import FileNode |
|
55 | 55 | |
|
56 | 56 | from rhodecode.model.repo import RepoModel |
|
57 | 57 | from rhodecode.model.scm import ScmModel |
|
58 | 58 | from rhodecode.model.db import Repository |
|
59 | 59 | |
|
60 | 60 | from rhodecode.controllers.changeset import ( |
|
61 | 61 | _ignorews_url, _context_url, get_line_ctx, get_ignore_ws) |
|
62 | 62 | from rhodecode.lib.exceptions import NonRelativePathError |
|
63 | 63 | |
|
64 | 64 | log = logging.getLogger(__name__) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class FilesController(BaseRepoController): |
|
68 | 68 | |
|
69 | 69 | def __before__(self): |
|
70 | 70 | super(FilesController, self).__before__() |
|
71 | 71 | c.cut_off_limit = self.cut_off_limit_file |
|
72 | 72 | |
|
73 | 73 | def _get_default_encoding(self): |
|
74 | 74 | enc_list = getattr(c, 'default_encodings', []) |
|
75 | 75 | return enc_list[0] if enc_list else 'UTF-8' |
|
76 | 76 | |
|
77 | 77 | def __get_commit_or_redirect(self, commit_id, repo_name, |
|
78 | 78 | redirect_after=True): |
|
79 | 79 | """ |
|
80 | 80 | This is a safe way to get a commit. If an error occurs it redirects to |

81 | 81 | the tip with a proper message |
|
82 | 82 | |
|
83 | 83 | :param commit_id: id of commit to fetch |
|
84 | 84 | :param repo_name: repo name to redirect after |
|
85 | 85 | :param redirect_after: toggle redirection |
|
86 | 86 | """ |
|
87 | 87 | try: |
|
88 | 88 | return c.rhodecode_repo.get_commit(commit_id) |
|
89 | 89 | except EmptyRepositoryError: |
|
90 | 90 | if not redirect_after: |
|
91 | 91 | return None |
|
92 | 92 | url_ = url('files_add_home', |
|
93 | 93 | repo_name=c.repo_name, |
|
94 | 94 | revision=0, f_path='', anchor='edit') |
|
95 | 95 | if h.HasRepoPermissionAny( |
|
96 | 96 | 'repository.write', 'repository.admin')(c.repo_name): |
|
97 | 97 | add_new = h.link_to( |
|
98 | 98 | _('Click here to add a new file.'), |
|
99 | 99 | url_, class_="alert-link") |
|
100 | 100 | else: |
|
101 | 101 | add_new = "" |
|
102 | 102 | h.flash(h.literal( |
|
103 | 103 | _('There are no files yet. %s') % add_new), category='warning') |
|
104 | redirect(h. |
|
|
104 | redirect(h.route_path('repo_summary', repo_name=repo_name)) | |
|
105 | 105 | except (CommitDoesNotExistError, LookupError): |
|
106 | 106 | msg = _('No such commit exists for this repository') |
|
107 | 107 | h.flash(msg, category='error') |
|
108 | 108 | raise HTTPNotFound() |
|
109 | 109 | except RepositoryError as e: |
|
110 | 110 | h.flash(safe_str(e), category='error') |
|
111 | 111 | raise HTTPNotFound() |
|
112 | 112 | |
|
113 | 113 | def __get_filenode_or_redirect(self, repo_name, commit, path): |
|
114 | 114 | """ |
|
115 | 115 | Returns file_node. If an error occurs or the given path is a directory, 

116 | 116 | it'll redirect to the top level path 
|
117 | 117 | |
|
118 | 118 | :param repo_name: repo_name |
|
119 | 119 | :param commit: given commit |
|
120 | 120 | :param path: path to lookup |
|
121 | 121 | """ |
|
122 | 122 | try: |
|
123 | 123 | file_node = commit.get_node(path) |
|
124 | 124 | if file_node.is_dir(): |
|
125 | 125 | raise RepositoryError('The given path is a directory') |
|
126 | 126 | except CommitDoesNotExistError: |
|
127 | 127 | msg = _('No such commit exists for this repository') |
|
128 | 128 | log.exception(msg) |
|
129 | 129 | h.flash(msg, category='error') |
|
130 | 130 | raise HTTPNotFound() |
|
131 | 131 | except RepositoryError as e: |
|
132 | 132 | h.flash(safe_str(e), category='error') |
|
133 | 133 | raise HTTPNotFound() |
|
134 | 134 | |
|
135 | 135 | return file_node |
|
136 | 136 | |
|
137 | 137 | def __get_tree_cache_manager(self, repo_name, namespace_type): |
|
138 | 138 | _namespace = caches.get_repo_namespace_key(namespace_type, repo_name) |
|
139 | 139 | return caches.get_cache_manager('repo_cache_long', _namespace) |
|
140 | 140 | |
|
141 | 141 | def _get_tree_at_commit(self, repo_name, commit_id, f_path, |
|
142 | 142 | full_load=False, force=False): |
|
143 | 143 | def _cached_tree(): |
|
144 | 144 | log.debug('Generating cached file tree for %s, %s, %s', |
|
145 | 145 | repo_name, commit_id, f_path) |
|
146 | 146 | c.full_load = full_load |
|
147 | 147 | return render('files/files_browser_tree.mako') |
|
148 | 148 | |
|
149 | 149 | cache_manager = self.__get_tree_cache_manager( |
|
150 | 150 | repo_name, caches.FILE_TREE) |
|
151 | 151 | |
|
152 | 152 | cache_key = caches.compute_key_from_params( |
|
153 | 153 | repo_name, commit_id, f_path) |
|
154 | 154 | |
|
155 | 155 | if force: |
|
156 | 156 | # we want to force recompute of caches |
|
157 | 157 | cache_manager.remove_value(cache_key) |
|
158 | 158 | |
|
159 | 159 | return cache_manager.get(cache_key, createfunc=_cached_tree) |
|
160 | 160 | |
|
161 | 161 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): |
|
162 | 162 | def _cached_nodes(): |
|
163 | 163 | log.debug('Generating cached nodelist for %s, %s, %s', |
|
164 | 164 | repo_name, commit_id, f_path) |
|
165 | 165 | _d, _f = ScmModel().get_nodes( |
|
166 | 166 | repo_name, commit_id, f_path, flat=False) |
|
167 | 167 | return _d + _f |
|
168 | 168 | |
|
169 | 169 | cache_manager = self.__get_tree_cache_manager( |
|
170 | 170 | repo_name, caches.FILE_SEARCH_TREE_META) |
|
171 | 171 | |
|
172 | 172 | cache_key = caches.compute_key_from_params( |
|
173 | 173 | repo_name, commit_id, f_path) |
|
174 | 174 | return cache_manager.get(cache_key, createfunc=_cached_nodes) |
|
175 | 175 | |
|
176 | 176 | @LoginRequired() |
|
177 | 177 | @HasRepoPermissionAnyDecorator( |
|
178 | 178 | 'repository.read', 'repository.write', 'repository.admin') |
|
179 | 179 | def index( |
|
180 | 180 | self, repo_name, revision, f_path, annotate=False, rendered=False): |
|
181 | 181 | commit_id = revision |
|
182 | 182 | |
|
183 | 183 | # redirect to given commit_id from form if given |
|
184 | 184 | get_commit_id = request.GET.get('at_rev', None) |
|
185 | 185 | if get_commit_id: |
|
186 | 186 | self.__get_commit_or_redirect(get_commit_id, repo_name) |
|
187 | 187 | |
|
188 | 188 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
189 | 189 | c.branch = request.GET.get('branch', None) |
|
190 | 190 | c.f_path = f_path |
|
191 | 191 | c.annotate = annotate |
|
192 | 192 | # default is false, but .rst/.md files later are autorendered, we can |
|
193 | 193 | # overwrite autorendering by setting this GET flag |
|
194 | 194 | c.renderer = rendered or not request.GET.get('no-render', False) |
|
195 | 195 | |
|
196 | 196 | # prev link |
|
197 | 197 | try: |
|
198 | 198 | prev_commit = c.commit.prev(c.branch) |
|
199 | 199 | c.prev_commit = prev_commit |
|
200 | 200 | c.url_prev = url('files_home', repo_name=c.repo_name, |
|
201 | 201 | revision=prev_commit.raw_id, f_path=f_path) |
|
202 | 202 | if c.branch: |
|
203 | 203 | c.url_prev += '?branch=%s' % c.branch |
|
204 | 204 | except (CommitDoesNotExistError, VCSError): |
|
205 | 205 | c.url_prev = '#' |
|
206 | 206 | c.prev_commit = EmptyCommit() |
|
207 | 207 | |
|
208 | 208 | # next link |
|
209 | 209 | try: |
|
210 | 210 | next_commit = c.commit.next(c.branch) |
|
211 | 211 | c.next_commit = next_commit |
|
212 | 212 | c.url_next = url('files_home', repo_name=c.repo_name, |
|
213 | 213 | revision=next_commit.raw_id, f_path=f_path) |
|
214 | 214 | if c.branch: |
|
215 | 215 | c.url_next += '?branch=%s' % c.branch |
|
216 | 216 | except (CommitDoesNotExistError, VCSError): |
|
217 | 217 | c.url_next = '#' |
|
218 | 218 | c.next_commit = EmptyCommit() |
|
219 | 219 | |
|
220 | 220 | # files or dirs |
|
221 | 221 | try: |
|
222 | 222 | c.file = c.commit.get_node(f_path) |
|
223 | 223 | c.file_author = True |
|
224 | 224 | c.file_tree = '' |
|
225 | 225 | if c.file.is_file(): |
|
226 | 226 | c.lf_node = c.file.get_largefile_node() |
|
227 | 227 | |
|
228 | 228 | c.file_source_page = 'true' |
|
229 | 229 | c.file_last_commit = c.file.last_commit |
|
230 | 230 | if c.file.size < self.cut_off_limit_file: |
|
231 | 231 | if c.annotate: # annotation has precedence over renderer |
|
232 | 232 | c.annotated_lines = filenode_as_annotated_lines_tokens( |
|
233 | 233 | c.file |
|
234 | 234 | ) |
|
235 | 235 | else: |
|
236 | 236 | c.renderer = ( |
|
237 | 237 | c.renderer and h.renderer_from_filename(c.file.path) |
|
238 | 238 | ) |
|
239 | 239 | if not c.renderer: |
|
240 | 240 | c.lines = filenode_as_lines_tokens(c.file) |
|
241 | 241 | |
|
242 | 242 | c.on_branch_head = self._is_valid_head( |
|
243 | 243 | commit_id, c.rhodecode_repo) |
|
244 | 244 | |
|
245 | 245 | branch = c.commit.branch if ( |
|
246 | 246 | c.commit.branch and '/' not in c.commit.branch) else None |
|
247 | 247 | c.branch_or_raw_id = branch or c.commit.raw_id |
|
248 | 248 | c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id) |
|
249 | 249 | |
|
250 | 250 | author = c.file_last_commit.author |
|
251 | 251 | c.authors = [(h.email(author), |
|
252 | 252 | h.person(author, 'username_or_name_or_email'))] |
|
253 | 253 | else: |
|
254 | 254 | c.file_source_page = 'false' |
|
255 | 255 | c.authors = [] |
|
256 | 256 | c.file_tree = self._get_tree_at_commit( |
|
257 | 257 | repo_name, c.commit.raw_id, f_path) |
|
258 | 258 | |
|
259 | 259 | except RepositoryError as e: |
|
260 | 260 | h.flash(safe_str(e), category='error') |
|
261 | 261 | raise HTTPNotFound() |
|
262 | 262 | |
|
263 | 263 | if request.environ.get('HTTP_X_PJAX'): |
|
264 | 264 | return render('files/files_pjax.mako') |
|
265 | 265 | |
|
266 | 266 | return render('files/files.mako') |
|
267 | 267 | |
|
268 | 268 | @LoginRequired() |
|
269 | 269 | @HasRepoPermissionAnyDecorator( |
|
270 | 270 | 'repository.read', 'repository.write', 'repository.admin') |
|
271 | 271 | def annotate_previous(self, repo_name, revision, f_path): |
|
272 | 272 | |
|
273 | 273 | commit_id = revision |
|
274 | 274 | commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
275 | 275 | prev_commit_id = commit.raw_id |
|
276 | 276 | |
|
277 | 277 | f_path = f_path |
|
278 | 278 | is_file = False |
|
279 | 279 | try: |
|
280 | 280 | _file = commit.get_node(f_path) |
|
281 | 281 | is_file = _file.is_file() |
|
282 | 282 | except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError): |
|
283 | 283 | pass |
|
284 | 284 | |
|
285 | 285 | if is_file: |
|
286 | 286 | history = commit.get_file_history(f_path) |
|
287 | 287 | prev_commit_id = history[1].raw_id \ |
|
288 | 288 | if len(history) > 1 else prev_commit_id |
|
289 | 289 | |
|
290 | 290 | return redirect(h.url( |
|
291 | 291 | 'files_annotate_home', repo_name=repo_name, |
|
292 | 292 | revision=prev_commit_id, f_path=f_path)) |
|
293 | 293 | |
|
294 | 294 | @LoginRequired() |
|
295 | 295 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
296 | 296 | 'repository.admin') |
|
297 | 297 | @jsonify |
|
298 | 298 | def history(self, repo_name, revision, f_path): |
|
299 | 299 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
300 | 300 | f_path = f_path |
|
301 | 301 | _file = commit.get_node(f_path) |
|
302 | 302 | if _file.is_file(): |
|
303 | 303 | file_history, _hist = self._get_node_history(commit, f_path) |
|
304 | 304 | |
|
305 | 305 | res = [] |
|
306 | 306 | for obj in file_history: |
|
307 | 307 | res.append({ |
|
308 | 308 | 'text': obj[1], |
|
309 | 309 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
310 | 310 | }) |
|
311 | 311 | |
|
312 | 312 | data = { |
|
313 | 313 | 'more': False, |
|
314 | 314 | 'results': res |
|
315 | 315 | } |
|
316 | 316 | return data |
|
317 | 317 | |
|
318 | 318 | @LoginRequired() |
|
319 | 319 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
320 | 320 | 'repository.admin') |
|
321 | 321 | def authors(self, repo_name, revision, f_path): |
|
322 | 322 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
323 | 323 | file_node = commit.get_node(f_path) |
|
324 | 324 | if file_node.is_file(): |
|
325 | 325 | c.file_last_commit = file_node.last_commit |
|
326 | 326 | if request.GET.get('annotate') == '1': |
|
327 | 327 | # use _hist from annotation if annotation mode is on |
|
328 | 328 | commit_ids = set(x[1] for x in file_node.annotate) |
|
329 | 329 | _hist = ( |
|
330 | 330 | c.rhodecode_repo.get_commit(commit_id) |
|
331 | 331 | for commit_id in commit_ids) |
|
332 | 332 | else: |
|
333 | 333 | _f_history, _hist = self._get_node_history(commit, f_path) |
|
334 | 334 | c.file_author = False |
|
335 | 335 | c.authors = [] |
|
336 | 336 | for author in set(commit.author for commit in _hist): |
|
337 | 337 | c.authors.append(( |
|
338 | 338 | h.email(author), |
|
339 | 339 | h.person(author, 'username_or_name_or_email'))) |
|
340 | 340 | return render('files/file_authors_box.mako') |
|
341 | 341 | |
|
342 | 342 | @LoginRequired() |
|
343 | 343 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
344 | 344 | 'repository.admin') |
|
345 | 345 | def rawfile(self, repo_name, revision, f_path): |
|
346 | 346 | """ |
|
347 | 347 | Action for download as raw |
|
348 | 348 | """ |
|
349 | 349 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
350 | 350 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
351 | 351 | |
|
352 | 352 | if request.GET.get('lf'): |
|
353 | 353 | # only if lf get flag is passed, we download this file |
|
354 | 354 | # as LFS/Largefile |
|
355 | 355 | lf_node = file_node.get_largefile_node() |
|
356 | 356 | if lf_node: |
|
357 | 357 | # overwrite our pointer with the REAL large-file |
|
358 | 358 | file_node = lf_node |
|
359 | 359 | |
|
360 | 360 | response.content_disposition = 'attachment; filename=%s' % \ |
|
361 | 361 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
362 | 362 | |
|
363 | 363 | response.content_type = file_node.mimetype |
|
364 | 364 | charset = self._get_default_encoding() |
|
365 | 365 | if charset: |
|
366 | 366 | response.charset = charset |
|
367 | 367 | |
|
368 | 368 | return file_node.content |
|
369 | 369 | |
|
370 | 370 | @LoginRequired() |
|
371 | 371 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
372 | 372 | 'repository.admin') |
|
373 | 373 | def raw(self, repo_name, revision, f_path): |
|
374 | 374 | """ |
|
375 | 375 | Action for show as raw; some mimetypes are "rendered" inline, 

376 | 376 | such as images and icons. 
|
377 | 377 | """ |
|
378 | 378 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
379 | 379 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
380 | 380 | |
|
381 | 381 | raw_mimetype_mapping = { |
|
382 | 382 | # map original mimetype to a mimetype used for "show as raw" |
|
383 | 383 | # you can also provide a content-disposition to override the |
|
384 | 384 | # default "attachment" disposition. |
|
385 | 385 | # orig_type: (new_type, new_dispo) |
|
386 | 386 | |
|
387 | 387 | # show images inline: |
|
388 | 388 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can |
|
389 | 389 | # for example render an SVG with javascript inside or even render |
|
390 | 390 | # HTML. |
|
391 | 391 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
392 | 392 | 'image/png': ('image/png', 'inline'), |
|
393 | 393 | 'image/gif': ('image/gif', 'inline'), |
|
394 | 394 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
395 | 395 | 'application/pdf': ('application/pdf', 'inline'), |
|
396 | 396 | } |
|
397 | 397 | |
|
398 | 398 | mimetype = file_node.mimetype |
|
399 | 399 | try: |
|
400 | 400 | mimetype, dispo = raw_mimetype_mapping[mimetype] |
|
401 | 401 | except KeyError: |
|
402 | 402 | # we don't know anything special about this, handle it safely |
|
403 | 403 | if file_node.is_binary: |
|
404 | 404 | # do same as download raw for binary files |
|
405 | 405 | mimetype, dispo = 'application/octet-stream', 'attachment' |
|
406 | 406 | else: |
|
407 | 407 | # do not just use the original mimetype, but force text/plain, |
|
408 | 408 | # otherwise it would serve text/html and that might be unsafe. |
|
409 | 409 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
410 | 410 | # mimetype can not be determined and it thinks it is not |
|
411 | 411 | # binary. This might lead to erroneous text display in some 
|
412 | 412 | # cases, but helps in other cases, like with text files |
|
413 | 413 | # without extension. |
|
414 | 414 | mimetype, dispo = 'text/plain', 'inline' |
|
415 | 415 | |
|
416 | 416 | if dispo == 'attachment': |
|
417 | 417 | dispo = 'attachment; filename=%s' % safe_str( |
|
418 | 418 | f_path.split(os.sep)[-1]) |
|
419 | 419 | |
|
420 | 420 | response.content_disposition = dispo |
|
421 | 421 | response.content_type = mimetype |
|
422 | 422 | charset = self._get_default_encoding() |
|
423 | 423 | if charset: |
|
424 | 424 | response.charset = charset |
|
425 | 425 | return file_node.content |
|
426 | 426 | |
|
427 | 427 | @CSRFRequired() |
|
428 | 428 | @LoginRequired() |
|
429 | 429 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
430 | 430 | def delete(self, repo_name, revision, f_path): |
|
431 | 431 | commit_id = revision |
|
432 | 432 | |
|
433 | 433 | repo = c.rhodecode_db_repo |
|
434 | 434 | if repo.enable_locking and repo.locked[0]: |
|
435 | 435 | h.flash(_('This repository has been locked by %s on %s') |
|
436 | 436 | % (h.person_by_id(repo.locked[0]), |
|
437 | 437 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
438 | 438 | 'warning') |
|
439 | 439 | return redirect(h.url('files_home', |
|
440 | 440 | repo_name=repo_name, revision='tip')) |
|
441 | 441 | |
|
442 | 442 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
443 | 443 | h.flash(_('You can only delete files with revision ' |
|
444 | 444 | 'being a valid branch '), category='warning') |
|
445 | 445 | return redirect(h.url('files_home', |
|
446 | 446 | repo_name=repo_name, revision='tip', |
|
447 | 447 | f_path=f_path)) |
|
448 | 448 | |
|
449 | 449 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
450 | 450 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
451 | 451 | |
|
452 | 452 | c.default_message = _( |
|
453 | 453 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
454 | 454 | c.f_path = f_path |
|
455 | 455 | node_path = f_path |
|
456 | 456 | author = c.rhodecode_user.full_contact |
|
457 | 457 | message = request.POST.get('message') or c.default_message |
|
458 | 458 | try: |
|
459 | 459 | nodes = { |
|
460 | 460 | node_path: { |
|
461 | 461 | 'content': '' |
|
462 | 462 | } |
|
463 | 463 | } |
|
464 | 464 | self.scm_model.delete_nodes( |
|
465 | 465 | user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo, |
|
466 | 466 | message=message, |
|
467 | 467 | nodes=nodes, |
|
468 | 468 | parent_commit=c.commit, |
|
469 | 469 | author=author, |
|
470 | 470 | ) |
|
471 | 471 | |
|
472 | 472 | h.flash(_('Successfully deleted file %s') % f_path, |
|
473 | 473 | category='success') |
|
474 | 474 | except Exception: |
|
475 | 475 | msg = _('Error occurred during commit') |
|
476 | 476 | log.exception(msg) |
|
477 | 477 | h.flash(msg, category='error') |
|
478 | 478 | return redirect(url('changeset_home', |
|
479 | 479 | repo_name=c.repo_name, revision='tip')) |
|
480 | 480 | |
|
481 | 481 | @LoginRequired() |
|
482 | 482 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
483 | 483 | def delete_home(self, repo_name, revision, f_path): |
|
484 | 484 | commit_id = revision |
|
485 | 485 | |
|
486 | 486 | repo = c.rhodecode_db_repo |
|
487 | 487 | if repo.enable_locking and repo.locked[0]: |
|
488 | 488 | h.flash(_('This repository has been locked by %s on %s') |
|
489 | 489 | % (h.person_by_id(repo.locked[0]), |
|
490 | 490 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
491 | 491 | 'warning') |
|
492 | 492 | return redirect(h.url('files_home', |
|
493 | 493 | repo_name=repo_name, revision='tip')) |
|
494 | 494 | |
|
495 | 495 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
496 | 496 | h.flash(_('You can only delete files with revision ' |
|
497 | 497 | 'being a valid branch '), category='warning') |
|
498 | 498 | return redirect(h.url('files_home', |
|
499 | 499 | repo_name=repo_name, revision='tip', |
|
500 | 500 | f_path=f_path)) |
|
501 | 501 | |
|
502 | 502 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
503 | 503 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
504 | 504 | |
|
505 | 505 | c.default_message = _( |
|
506 | 506 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
507 | 507 | c.f_path = f_path |
|
508 | 508 | |
|
509 | 509 | return render('files/files_delete.mako') |
|
510 | 510 | |
|
511 | 511 | @CSRFRequired() |
|
512 | 512 | @LoginRequired() |
|
513 | 513 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
514 | 514 | def edit(self, repo_name, revision, f_path): |
|
515 | 515 | commit_id = revision |
|
516 | 516 | |
|
517 | 517 | repo = c.rhodecode_db_repo |
|
518 | 518 | if repo.enable_locking and repo.locked[0]: |
|
519 | 519 | h.flash(_('This repository has been locked by %s on %s') |
|
520 | 520 | % (h.person_by_id(repo.locked[0]), |
|
521 | 521 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
522 | 522 | 'warning') |
|
523 | 523 | return redirect(h.url('files_home', |
|
524 | 524 | repo_name=repo_name, revision='tip')) |
|
525 | 525 | |
|
526 | 526 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
527 | 527 | h.flash(_('You can only edit files with revision ' |
|
528 | 528 | 'being a valid branch '), category='warning') |
|
529 | 529 | return redirect(h.url('files_home', |
|
530 | 530 | repo_name=repo_name, revision='tip', |
|
531 | 531 | f_path=f_path)) |
|
532 | 532 | |
|
533 | 533 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
534 | 534 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
535 | 535 | |
|
536 | 536 | if c.file.is_binary: |
|
537 | 537 | return redirect(url('files_home', repo_name=c.repo_name, |
|
538 | 538 | revision=c.commit.raw_id, f_path=f_path)) |
|
539 | 539 | c.default_message = _( |
|
540 | 540 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
541 | 541 | c.f_path = f_path |
|
542 | 542 | old_content = c.file.content |
|
543 | 543 | sl = old_content.splitlines(1) |
|
544 | 544 | first_line = sl[0] if sl else '' |
|
545 | 545 | |
|
546 | 546 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
547 | 547 | mode = detect_mode(first_line, 0) |
|
548 | 548 | content = convert_line_endings(request.POST.get('content', ''), mode) |
|
549 | 549 | |
|
550 | 550 | message = request.POST.get('message') or c.default_message |
|
551 | 551 | org_f_path = c.file.unicode_path |
|
552 | 552 | filename = request.POST['filename'] |
|
553 | 553 | org_filename = c.file.name |
|
554 | 554 | |
|
555 | 555 | if content == old_content and filename == org_filename: |
|
556 | 556 | h.flash(_('No changes'), category='warning') |
|
557 | 557 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
558 | 558 | revision='tip')) |
|
559 | 559 | try: |
|
560 | 560 | mapping = { |
|
561 | 561 | org_f_path: { |
|
562 | 562 | 'org_filename': org_f_path, |
|
563 | 563 | 'filename': os.path.join(c.file.dir_path, filename), |
|
564 | 564 | 'content': content, |
|
565 | 565 | 'lexer': '', |
|
566 | 566 | 'op': 'mod', |
|
567 | 567 | } |
|
568 | 568 | } |
|
569 | 569 | |
|
570 | 570 | ScmModel().update_nodes( |
|
571 | 571 | user=c.rhodecode_user.user_id, |
|
572 | 572 | repo=c.rhodecode_db_repo, |
|
573 | 573 | message=message, |
|
574 | 574 | nodes=mapping, |
|
575 | 575 | parent_commit=c.commit, |
|
576 | 576 | ) |
|
577 | 577 | |
|
578 | 578 | h.flash(_('Successfully committed to %s') % f_path, |
|
579 | 579 | category='success') |
|
580 | 580 | except Exception: |
|
581 | 581 | msg = _('Error occurred during commit') |
|
582 | 582 | log.exception(msg) |
|
583 | 583 | h.flash(msg, category='error') |
|
584 | 584 | return redirect(url('changeset_home', |
|
585 | 585 | repo_name=c.repo_name, revision='tip')) |
|
586 | 586 | |
|
587 | 587 | @LoginRequired() |
|
588 | 588 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
589 | 589 | def edit_home(self, repo_name, revision, f_path): |
|
590 | 590 | commit_id = revision |
|
591 | 591 | |
|
592 | 592 | repo = c.rhodecode_db_repo |
|
593 | 593 | if repo.enable_locking and repo.locked[0]: |
|
594 | 594 | h.flash(_('This repository has been locked by %s on %s') |
|
595 | 595 | % (h.person_by_id(repo.locked[0]), |
|
596 | 596 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
597 | 597 | 'warning') |
|
598 | 598 | return redirect(h.url('files_home', |
|
599 | 599 | repo_name=repo_name, revision='tip')) |
|
600 | 600 | |
|
601 | 601 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
602 | 602 | h.flash(_('You can only edit files with revision ' |
|
603 | 603 | 'being a valid branch '), category='warning') |
|
604 | 604 | return redirect(h.url('files_home', |
|
605 | 605 | repo_name=repo_name, revision='tip', |
|
606 | 606 | f_path=f_path)) |
|
607 | 607 | |
|
608 | 608 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
609 | 609 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
610 | 610 | |
|
611 | 611 | if c.file.is_binary: |
|
612 | 612 | return redirect(url('files_home', repo_name=c.repo_name, |
|
613 | 613 | revision=c.commit.raw_id, f_path=f_path)) |
|
614 | 614 | c.default_message = _( |
|
615 | 615 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
616 | 616 | c.f_path = f_path |
|
617 | 617 | |
|
618 | 618 | return render('files/files_edit.mako') |
|
619 | 619 | |
|
620 | 620 | def _is_valid_head(self, commit_id, repo): |
|
621 | 621 | # check if commit is a branch identifier - basically we cannot 
|
622 | 622 | # create multiple heads via file editing |
|
623 | 623 | valid_heads = repo.branches.keys() + repo.branches.values() |
|
624 | 624 | |
|
625 | 625 | if h.is_svn(repo) and not repo.is_empty(): |
|
626 | 626 | # Note: Subversion only has one head, we add it here in case there |
|
627 | 627 | # is no branch matched. |
|
628 | 628 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) |
|
629 | 629 | |
|
630 | 630 | # check if commit is a branch name or branch hash |
|
631 | 631 | return commit_id in valid_heads |
|
632 | 632 | |
|
633 | 633 | @CSRFRequired() |
|
634 | 634 | @LoginRequired() |
|
635 | 635 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
636 | 636 | def add(self, repo_name, revision, f_path): |
|
637 | 637 | repo = Repository.get_by_repo_name(repo_name) |
|
638 | 638 | if repo.enable_locking and repo.locked[0]: |
|
639 | 639 | h.flash(_('This repository has been locked by %s on %s') |
|
640 | 640 | % (h.person_by_id(repo.locked[0]), |
|
641 | 641 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
642 | 642 | 'warning') |
|
643 | 643 | return redirect(h.url('files_home', |
|
644 | 644 | repo_name=repo_name, revision='tip')) |
|
645 | 645 | |
|
646 | 646 | r_post = request.POST |
|
647 | 647 | |
|
648 | 648 | c.commit = self.__get_commit_or_redirect( |
|
649 | 649 | revision, repo_name, redirect_after=False) |
|
650 | 650 | if c.commit is None: |
|
651 | 651 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
652 | 652 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
653 | 653 | c.f_path = f_path |
|
654 | 654 | unix_mode = 0 |
|
655 | 655 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
656 | 656 | |
|
657 | 657 | message = r_post.get('message') or c.default_message |
|
658 | 658 | filename = r_post.get('filename') |
|
659 | 659 | location = r_post.get('location', '') # dir location |
|
660 | 660 | file_obj = r_post.get('upload_file', None) |
|
661 | 661 | |
|
662 | 662 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
663 | 663 | filename = r_post.get('filename_upload') |
|
664 | 664 | content = file_obj.file |
|
665 | 665 | |
|
666 | 666 | if hasattr(content, 'file'): |
|
667 | 667 | # non posix systems store real file under file attr |
|
668 | 668 | content = content.file |
|
669 | 669 | |
|
670 | 670 | # If there's no commit, redirect to repo summary |
|
671 | 671 | if type(c.commit) is EmptyCommit: |
|
672 | redirect_url = | 

672 | redirect_url = h.route_path('repo_summary', repo_name=c.repo_name) | 
|
673 | 673 | else: |
|
674 | redirect_url = "changeset_home" | |
|
674 | redirect_url = url("changeset_home", repo_name=c.repo_name, | |
|
675 | revision='tip') | |
|
675 | 676 | |
|
676 | 677 | if not filename: |
|
677 | 678 | h.flash(_('No filename'), category='warning') |
|
678 | return redirect( | 

679 | revision='tip')) | 

679 | return redirect(redirect_url) | 
|
680 | 680 | |
|
681 | 681 | # extract the location from filename, |
|
682 | 682 | # allows using foo/bar.txt syntax to create subdirectories |
|
683 | 683 | subdir_loc = filename.rsplit('/', 1) |
|
684 | 684 | if len(subdir_loc) == 2: |
|
685 | 685 | location = os.path.join(location, subdir_loc[0]) |
|
686 | 686 | |
|
687 | 687 | # strip all crap out of file, just leave the basename |
|
688 | 688 | filename = os.path.basename(filename) |
|
689 | 689 | node_path = os.path.join(location, filename) |
|
690 | 690 | author = c.rhodecode_user.full_contact |
|
691 | 691 | |
|
692 | 692 | try: |
|
693 | 693 | nodes = { |
|
694 | 694 | node_path: { |
|
695 | 695 | 'content': content |
|
696 | 696 | } |
|
697 | 697 | } |
|
698 | 698 | self.scm_model.create_nodes( |
|
699 | 699 | user=c.rhodecode_user.user_id, |
|
700 | 700 | repo=c.rhodecode_db_repo, |
|
701 | 701 | message=message, |
|
702 | 702 | nodes=nodes, |
|
703 | 703 | parent_commit=c.commit, |
|
704 | 704 | author=author, |
|
705 | 705 | ) |
|
706 | 706 | |
|
707 | 707 | h.flash(_('Successfully committed to %s') % node_path, |
|
708 | 708 | category='success') |
|
709 | 709 | except NonRelativePathError as e: |
|
710 | 710 | h.flash(_( |
|
711 | 711 | 'The location specified must be a relative path and must not ' |
|
712 | 712 | 'contain .. in the path'), category='warning') |
|
713 | 713 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
714 | 714 | revision='tip')) |
|
715 | 715 | except (NodeError, NodeAlreadyExistsError) as e: |
|
716 | 716 | h.flash(_(e), category='error') |
|
717 | 717 | except Exception: |
|
718 | 718 | msg = _('Error occurred during commit') |
|
719 | 719 | log.exception(msg) |
|
720 | 720 | h.flash(msg, category='error') |
|
721 | 721 | return redirect(url('changeset_home', |
|
722 | 722 | repo_name=c.repo_name, revision='tip')) |
|
723 | 723 | |
|
724 | 724 | @LoginRequired() |
|
725 | 725 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
726 | 726 | def add_home(self, repo_name, revision, f_path): |
|
727 | 727 | |
|
728 | 728 | repo = Repository.get_by_repo_name(repo_name) |
|
729 | 729 | if repo.enable_locking and repo.locked[0]: |
|
730 | 730 | h.flash(_('This repository has been locked by %s on %s') |
|
731 | 731 | % (h.person_by_id(repo.locked[0]), |
|
732 | 732 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
733 | 733 | 'warning') |
|
734 | 734 | return redirect(h.url('files_home', |
|
735 | 735 | repo_name=repo_name, revision='tip')) |
|
736 | 736 | |
|
737 | 737 | c.commit = self.__get_commit_or_redirect( |
|
738 | 738 | revision, repo_name, redirect_after=False) |
|
739 | 739 | if c.commit is None: |
|
740 | 740 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
741 | 741 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
742 | 742 | c.f_path = f_path |
|
743 | 743 | |
|
744 | 744 | return render('files/files_add.mako') |
|
745 | 745 | |
|
746 | 746 | @LoginRequired() |
|
747 | 747 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
748 | 748 | 'repository.admin') |
|
749 | 749 | def archivefile(self, repo_name, fname): |
|
750 | 750 | fileformat = None |
|
751 | 751 | commit_id = None |
|
752 | 752 | ext = None |
|
753 | 753 | subrepos = request.GET.get('subrepos') == 'true' |
|
754 | 754 | |
|
755 | 755 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
756 | 756 | archive_spec = fname.split(ext_data[1]) |
|
757 | 757 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
758 | 758 | fileformat = a_type or ext_data[1] |
|
759 | 759 | commit_id = archive_spec[0] |
|
760 | 760 | ext = ext_data[1] |
|
761 | 761 | |
|
762 | 762 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
763 | 763 | if not dbrepo.enable_downloads: |
|
764 | 764 | return _('Downloads disabled') |
|
765 | 765 | |
|
766 | 766 | try: |
|
767 | 767 | commit = c.rhodecode_repo.get_commit(commit_id) |
|
768 | 768 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
769 | 769 | except CommitDoesNotExistError: |
|
770 | 770 | return _('Unknown revision %s') % commit_id |
|
771 | 771 | except EmptyRepositoryError: |
|
772 | 772 | return _('Empty repository') |
|
773 | 773 | except KeyError: |
|
774 | 774 | return _('Unknown archive type') |
|
775 | 775 | |
|
776 | 776 | # archive cache |
|
777 | 777 | from rhodecode import CONFIG |
|
778 | 778 | |
|
779 | 779 | archive_name = '%s-%s%s%s' % ( |
|
780 | 780 | safe_str(repo_name.replace('/', '_')), |
|
781 | 781 | '-sub' if subrepos else '', |
|
782 | 782 | safe_str(commit.short_id), ext) |
|
783 | 783 | |
|
784 | 784 | use_cached_archive = False |
|
785 | 785 | archive_cache_enabled = CONFIG.get( |
|
786 | 786 | 'archive_cache_dir') and not request.GET.get('no_cache') |
|
787 | 787 | |
|
788 | 788 | if archive_cache_enabled: |
|
789 | 789 | # check if it's ok to write 
|
790 | 790 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
791 | 791 | os.makedirs(CONFIG['archive_cache_dir']) |
|
792 | 792 | cached_archive_path = os.path.join( |
|
793 | 793 | CONFIG['archive_cache_dir'], archive_name) |
|
794 | 794 | if os.path.isfile(cached_archive_path): |
|
795 | 795 | log.debug('Found cached archive in %s', cached_archive_path) |
|
796 | 796 | fd, archive = None, cached_archive_path |
|
797 | 797 | use_cached_archive = True |
|
798 | 798 | else: |
|
799 | 799 | log.debug('Archive %s is not yet cached', archive_name) |
|
800 | 800 | |
|
801 | 801 | if not use_cached_archive: |
|
802 | 802 | # generate new archive |
|
803 | 803 | fd, archive = tempfile.mkstemp() |
|
804 | 804 | log.debug('Creating new temp archive in %s' % (archive,)) |
|
805 | 805 | try: |
|
806 | 806 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) |
|
807 | 807 | except ImproperArchiveTypeError: |
|
808 | 808 | return _('Unknown archive type') |
|
809 | 809 | if archive_cache_enabled: |
|
810 | 810 | # if we generated the archive and we have cache enabled |
|
811 | 811 | # let's use this for future |
|
812 | 812 | log.debug('Storing new archive in %s' % (cached_archive_path,)) |
|
813 | 813 | shutil.move(archive, cached_archive_path) |
|
814 | 814 | archive = cached_archive_path |
|
815 | 815 | |
|
816 | 816 | # store download action |
|
817 | 817 | audit_logger.store( |
|
818 | 818 | action='repo.archive.download', |
|
819 | 819 | action_data={'user_agent': request.user_agent, |
|
820 | 820 | 'archive_name': archive_name, |
|
821 | 821 | 'archive_spec': fname, |
|
822 | 822 | 'archive_cached': use_cached_archive}, |
|
823 | 823 | user=c.rhodecode_user, |
|
824 | 824 | repo=dbrepo, |
|
825 | 825 | commit=True |
|
826 | 826 | ) |
|
827 | 827 | |
|
828 | 828 | response.content_disposition = str( |
|
829 | 829 | 'attachment; filename=%s' % archive_name) |
|
830 | 830 | response.content_type = str(content_type) |
|
831 | 831 | |
|
832 | 832 | def get_chunked_archive(archive): |
|
833 | 833 | with open(archive, 'rb') as stream: |
|
834 | 834 | while True: |
|
835 | 835 | data = stream.read(16 * 1024) |
|
836 | 836 | if not data: |
|
837 | 837 | if fd: # fd means we used temporary file |
|
838 | 838 | os.close(fd) |
|
839 | 839 | if not archive_cache_enabled: |
|
840 | 840 | log.debug('Destroying temp archive %s', archive) |
|
841 | 841 | os.remove(archive) |
|
842 | 842 | break |
|
843 | 843 | yield data |
|
844 | 844 | |
|
845 | 845 | return get_chunked_archive(archive) |
|
846 | 846 | |
|
847 | 847 | @LoginRequired() |
|
848 | 848 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
849 | 849 | 'repository.admin') |
|
850 | 850 | def diff(self, repo_name, f_path): |
|
851 | 851 | |
|
852 | 852 | c.action = request.GET.get('diff') |
|
853 | 853 | diff1 = request.GET.get('diff1', '') |
|
854 | 854 | diff2 = request.GET.get('diff2', '') |
|
855 | 855 | |
|
856 | 856 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) |
|
857 | 857 | |
|
858 | 858 | ignore_whitespace = str2bool(request.GET.get('ignorews')) |
|
859 | 859 | line_context = request.GET.get('context', 3) |
|
860 | 860 | |
|
861 | 861 | if not any((diff1, diff2)): |
|
862 | 862 | h.flash( |
|
863 | 863 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
864 | 864 | category='error') |
|
865 | 865 | raise HTTPBadRequest() |
|
866 | 866 | |
|
867 | 867 | if c.action not in ['download', 'raw']: |
|
868 | 868 | # redirect to new view if we render diff |
|
869 | 869 | return redirect( |
|
870 | 870 | url('compare_url', repo_name=repo_name, |
|
871 | 871 | source_ref_type='rev', |
|
872 | 872 | source_ref=diff1, |
|
873 | 873 | target_repo=c.repo_name, |
|
874 | 874 | target_ref_type='rev', |
|
875 | 875 | target_ref=diff2, |
|
876 | 876 | f_path=f_path)) |
|
877 | 877 | |
|
878 | 878 | try: |
|
879 | 879 | node1 = self._get_file_node(diff1, path1) |
|
880 | 880 | node2 = self._get_file_node(diff2, f_path) |
|
881 | 881 | except (RepositoryError, NodeError): |
|
882 | 882 | log.exception("Exception while trying to get node from repository") |
|
883 | 883 | return redirect(url( |
|
884 | 884 | 'files_home', repo_name=c.repo_name, f_path=f_path)) |
|
885 | 885 | |
|
886 | 886 | if all(isinstance(node.commit, EmptyCommit) |
|
887 | 887 | for node in (node1, node2)): |
|
888 | 888 | raise HTTPNotFound |
|
889 | 889 | |
|
890 | 890 | c.commit_1 = node1.commit |
|
891 | 891 | c.commit_2 = node2.commit |
|
892 | 892 | |
|
893 | 893 | if c.action == 'download': |
|
894 | 894 | _diff = diffs.get_gitdiff(node1, node2, |
|
895 | 895 | ignore_whitespace=ignore_whitespace, |
|
896 | 896 | context=line_context) |
|
897 | 897 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
898 | 898 | |
|
899 | 899 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) |
|
900 | 900 | response.content_type = 'text/plain' |
|
901 | 901 | response.content_disposition = ( |
|
902 | 902 | 'attachment; filename=%s' % (diff_name,) |
|
903 | 903 | ) |
|
904 | 904 | charset = self._get_default_encoding() |
|
905 | 905 | if charset: |
|
906 | 906 | response.charset = charset |
|
907 | 907 | return diff.as_raw() |
|
908 | 908 | |
|
909 | 909 | elif c.action == 'raw': |
|
910 | 910 | _diff = diffs.get_gitdiff(node1, node2, |
|
911 | 911 | ignore_whitespace=ignore_whitespace, |
|
912 | 912 | context=line_context) |
|
913 | 913 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
914 | 914 | response.content_type = 'text/plain' |
|
915 | 915 | charset = self._get_default_encoding() |
|
916 | 916 | if charset: |
|
917 | 917 | response.charset = charset |
|
918 | 918 | return diff.as_raw() |
|
919 | 919 | |
|
920 | 920 | else: |
|
921 | 921 | return redirect( |
|
922 | 922 | url('compare_url', repo_name=repo_name, |
|
923 | 923 | source_ref_type='rev', |
|
924 | 924 | source_ref=diff1, |
|
925 | 925 | target_repo=c.repo_name, |
|
926 | 926 | target_ref_type='rev', |
|
927 | 927 | target_ref=diff2, |
|
928 | 928 | f_path=f_path)) |
|
929 | 929 | |
|
930 | 930 | @LoginRequired() |
|
931 | 931 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
932 | 932 | 'repository.admin') |
|
933 | 933 | def diff_2way(self, repo_name, f_path): |
|
934 | 934 | """ |
|
935 | 935 | Kept only to make OLD links work |
|
936 | 936 | """ |
|
937 | 937 | diff1 = request.GET.get('diff1', '') |
|
938 | 938 | diff2 = request.GET.get('diff2', '') |
|
939 | 939 | |
|
940 | 940 | if not any((diff1, diff2)): |
|
941 | 941 | h.flash( |
|
942 | 942 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
943 | 943 | category='error') |
|
944 | 944 | raise HTTPBadRequest() |
|
945 | 945 | |
|
946 | 946 | return redirect( |
|
947 | 947 | url('compare_url', repo_name=repo_name, |
|
948 | 948 | source_ref_type='rev', |
|
949 | 949 | source_ref=diff1, |
|
950 | 950 | target_repo=c.repo_name, |
|
951 | 951 | target_ref_type='rev', |
|
952 | 952 | target_ref=diff2, |
|
953 | 953 | f_path=f_path, |
|
954 | 954 | diffmode='sideside')) |
|
955 | 955 | |
|
956 | 956 | def _get_file_node(self, commit_id, f_path): |
|
957 | 957 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
958 | 958 | commit = c.rhodecode_repo.get_commit(commit_id=commit_id) |
|
959 | 959 | try: |
|
960 | 960 | node = commit.get_node(f_path) |
|
961 | 961 | if node.is_dir(): |
|
962 | 962 | raise NodeError('%s path is a %s not a file' |
|
963 | 963 | % (node, type(node))) |
|
964 | 964 | except NodeDoesNotExistError: |
|
965 | 965 | commit = EmptyCommit( |
|
966 | 966 | commit_id=commit_id, |
|
967 | 967 | idx=commit.idx, |
|
968 | 968 | repo=commit.repository, |
|
969 | 969 | alias=commit.repository.alias, |
|
970 | 970 | message=commit.message, |
|
971 | 971 | author=commit.author, |
|
972 | 972 | date=commit.date) |
|
973 | 973 | node = FileNode(f_path, '', commit=commit) |
|
974 | 974 | else: |
|
975 | 975 | commit = EmptyCommit( |
|
976 | 976 | repo=c.rhodecode_repo, |
|
977 | 977 | alias=c.rhodecode_repo.alias) |
|
978 | 978 | node = FileNode(f_path, '', commit=commit) |
|
979 | 979 | return node |
|
980 | 980 | |
|
981 | 981 | def _get_node_history(self, commit, f_path, commits=None): |
|
982 | 982 | """ |
|
983 | 983 | get commit history for given node |
|
984 | 984 | |
|
985 | 985 | :param commit: commit to calculate history |
|
986 | 986 | :param f_path: path for node to calculate history for |
|
987 | 987 | :param commits: if passed don't calculate history and take |
|
988 | 988 | commits defined in this list |
|
989 | 989 | """ |
|
990 | 990 | # calculate history based on tip |
|
991 | 991 | tip = c.rhodecode_repo.get_commit() |
|
992 | 992 | if commits is None: |
|
993 | 993 | pre_load = ["author", "branch"] |
|
994 | 994 | try: |
|
995 | 995 | commits = tip.get_file_history(f_path, pre_load=pre_load) |
|
996 | 996 | except (NodeDoesNotExistError, CommitError): |
|
997 | 997 | # this node is not present at tip! |
|
998 | 998 | commits = commit.get_file_history(f_path, pre_load=pre_load) |
|
999 | 999 | |
|
1000 | 1000 | history = [] |
|
1001 | 1001 | commits_group = ([], _("Changesets")) |
|
1002 | 1002 | for commit in commits: |
|
1003 | 1003 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
1004 | 1004 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) |
|
1005 | 1005 | commits_group[0].append((commit.raw_id, n_desc,)) |
|
1006 | 1006 | history.append(commits_group) |
|
1007 | 1007 | |
|
1008 | 1008 | symbolic_reference = self._symbolic_reference |
|
1009 | 1009 | |
|
1010 | 1010 | if c.rhodecode_repo.alias == 'svn': |
|
1011 | 1011 | adjusted_f_path = self._adjust_file_path_for_svn( |
|
1012 | 1012 | f_path, c.rhodecode_repo) |
|
1013 | 1013 | if adjusted_f_path != f_path: |
|
1014 | 1014 | log.debug( |
|
1015 | 1015 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
1016 | 1016 | 'specific symbolic references', f_path) |
|
1017 | 1017 | f_path = adjusted_f_path |
|
1018 | 1018 | symbolic_reference = self._symbolic_reference_svn |
|
1019 | 1019 | |
|
1020 | 1020 | branches = self._create_references( |
|
1021 | 1021 | c.rhodecode_repo.branches, symbolic_reference, f_path) |
|
1022 | 1022 | branches_group = (branches, _("Branches")) |
|
1023 | 1023 | |
|
1024 | 1024 | tags = self._create_references( |
|
1025 | 1025 | c.rhodecode_repo.tags, symbolic_reference, f_path) |
|
1026 | 1026 | tags_group = (tags, _("Tags")) |
|
1027 | 1027 | |
|
1028 | 1028 | history.append(branches_group) |
|
1029 | 1029 | history.append(tags_group) |
|
1030 | 1030 | |
|
1031 | 1031 | return history, commits |
|
1032 | 1032 | |
|
1033 | 1033 | def _adjust_file_path_for_svn(self, f_path, repo): |
|
1034 | 1034 | """ |
|
1035 | 1035 | Computes the relative path of `f_path`. |
|
1036 | 1036 | |
|
1037 | 1037 | This is mainly based on prefix matching of the recognized tags and |
|
1038 | 1038 | branches in the underlying repository. |
|
1039 | 1039 | """ |
|
1040 | 1040 | tags_and_branches = itertools.chain( |
|
1041 | 1041 | repo.branches.iterkeys(), |
|
1042 | 1042 | repo.tags.iterkeys()) |
|
1043 | 1043 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
1044 | 1044 | |
|
1045 | 1045 | for name in tags_and_branches: |
|
1046 | 1046 | if f_path.startswith(name + '/'): |
|
1047 | 1047 | f_path = vcspath.relpath(f_path, name) |
|
1048 | 1048 | break |
|
1049 | 1049 | return f_path |
|
1050 | 1050 | |
|
1051 | 1051 | def _create_references( |
|
1052 | 1052 | self, branches_or_tags, symbolic_reference, f_path): |
|
1053 | 1053 | items = [] |
|
1054 | 1054 | for name, commit_id in branches_or_tags.items(): |
|
1055 | 1055 | sym_ref = symbolic_reference(commit_id, name, f_path) |
|
1056 | 1056 | items.append((sym_ref, name)) |
|
1057 | 1057 | return items |
|
1058 | 1058 | |
|
1059 | 1059 | def _symbolic_reference(self, commit_id, name, f_path): |
|
1060 | 1060 | return commit_id |
|
1061 | 1061 | |
|
1062 | 1062 | def _symbolic_reference_svn(self, commit_id, name, f_path): |
|
1063 | 1063 | new_f_path = vcspath.join(name, f_path) |
|
1064 | 1064 | return u'%s@%s' % (new_f_path, commit_id) |
|
1065 | 1065 | |
|
1066 | 1066 | @LoginRequired() |
|
1067 | 1067 | @XHRRequired() |
|
1068 | 1068 | @HasRepoPermissionAnyDecorator( |
|
1069 | 1069 | 'repository.read', 'repository.write', 'repository.admin') |
|
1070 | 1070 | @jsonify |
|
1071 | 1071 | def nodelist(self, repo_name, revision, f_path): |
|
1072 | 1072 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
1073 | 1073 | |
|
1074 | 1074 | metadata = self._get_nodelist_at_commit( |
|
1075 | 1075 | repo_name, commit.raw_id, f_path) |
|
1076 | 1076 | return {'nodes': metadata} |
|
1077 | 1077 | |
|
1078 | 1078 | @LoginRequired() |
|
1079 | 1079 | @XHRRequired() |
|
1080 | 1080 | @HasRepoPermissionAnyDecorator( |
|
1081 | 1081 | 'repository.read', 'repository.write', 'repository.admin') |
|
1082 | 1082 | def nodetree_full(self, repo_name, commit_id, f_path): |
|
1083 | 1083 | """ |
|
1084 | 1084 | Returns rendered html of file tree that contains commit date, |
|
1085 | 1085 | author, revision for the specified combination of |
|
1086 | 1086 | repo, commit_id and file path |
|
1087 | 1087 | |
|
1088 | 1088 | :param repo_name: name of the repository |
|
1089 | 1089 | :param commit_id: commit_id of file tree |
|
1090 | 1090 | :param f_path: file path of the requested directory |
|
1091 | 1091 | """ |
|
1092 | 1092 | |
|
1093 | 1093 | commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
1094 | 1094 | try: |
|
1095 | 1095 | dir_node = commit.get_node(f_path) |
|
1096 | 1096 | except RepositoryError as e: |
|
1097 | 1097 | return 'error {}'.format(safe_str(e)) |
|
1098 | 1098 | |
|
1099 | 1099 | if dir_node.is_file(): |
|
1100 | 1100 | return '' |
|
1101 | 1101 | |
|
1102 | 1102 | c.file = dir_node |
|
1103 | 1103 | c.commit = commit |
|
1104 | 1104 | |
|
1105 | 1105 | # using force=True here is a little trick. We flush the cache and 
|
1106 | 1106 | # compute it using the same key as without full_load, so the fully |
|
1107 | 1107 | # loaded cached tree is now returned instead of partial |
|
1108 | 1108 | return self._get_tree_at_commit( |
|
1109 | 1109 | repo_name, commit.raw_id, dir_node.path, full_load=True, |
|
1110 | 1110 | force=True) |
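
The two cached helpers near the top of this controller (`_get_tree_at_commit` and `_get_nodelist_at_commit`) share one pattern: derive a cache key from the request parameters, drop the stale entry when force=True, then fetch with a createfunc that produces the value on a miss. Below is a minimal stand-alone sketch of that flow; the dict-backed manager and `render_tree` producer are stand-ins for the beaker cache region and mako rendering used above, not the actual RhodeCode API.

import hashlib


class SimpleCacheManager(object):
    # dict-backed stand-in for the 'repo_cache_long' beaker region

    def __init__(self):
        self._store = {}

    def remove_value(self, key):
        # drop a stale entry so the next get() recomputes it
        self._store.pop(key, None)

    def get(self, key, createfunc):
        # return the cached value, or build and remember it on a miss
        if key not in self._store:
            self._store[key] = createfunc()
        return self._store[key]


def compute_key_from_params(*args):
    # stable key derived from repo_name, commit_id and f_path
    return hashlib.sha1('_'.join(str(a) for a in args).encode('utf8')).hexdigest()


def get_tree_at_commit(cache_manager, repo_name, commit_id, f_path,
                       render_tree, force=False):
    cache_key = compute_key_from_params(repo_name, commit_id, f_path)
    if force:
        # mirror of the force=True branch above: flush before recomputing
        cache_manager.remove_value(cache_key)
    return cache_manager.get(cache_key, createfunc=lambda: render_tree(f_path))


if __name__ == '__main__':
    cm = SimpleCacheManager()
    render_tree = lambda path: 'rendered tree for %s' % path  # hypothetical producer
    print(get_tree_at_commit(cm, 'repo', 'abc123', '/', render_tree))
    print(get_tree_at_commit(cm, 'repo', 'abc123', '/', render_tree, force=True))
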
@@ -1,1018 +1,1018 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | pull requests controller for rhodecode for initializing pull requests |
|
23 | 23 | """ |
|
24 | 24 | import types |
|
25 | 25 | |
|
26 | 26 | import peppercorn |
|
27 | 27 | import formencode |
|
28 | 28 | import logging |
|
29 | 29 | import collections |
|
30 | 30 | |
|
31 | 31 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest |
|
32 | 32 | from pylons import request, tmpl_context as c, url |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from pylons.i18n.translation import _ |
|
35 | 35 | from pyramid.threadlocal import get_current_registry |
|
36 | 36 | from sqlalchemy.sql import func |
|
37 | 37 | from sqlalchemy.sql.expression import or_ |
|
38 | 38 | |
|
39 | 39 | from rhodecode import events |
|
40 | 40 | from rhodecode.lib import auth, diffs, helpers as h, codeblocks |
|
41 | 41 | from rhodecode.lib.ext_json import json |
|
42 | 42 | from rhodecode.lib.base import ( |
|
43 | 43 | BaseRepoController, render, vcs_operation_context) |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, |
|
46 | 46 | HasAcceptedRepoType, XHRRequired) |
|
47 | 47 | from rhodecode.lib.channelstream import channelstream_request |
|
48 | 48 | from rhodecode.lib.utils import jsonify |
|
49 | 49 | from rhodecode.lib.utils2 import ( |
|
50 | 50 | safe_int, safe_str, str2bool, safe_unicode) |
|
51 | 51 | from rhodecode.lib.vcs.backends.base import ( |
|
52 | 52 | EmptyCommit, UpdateFailureReason, EmptyRepository) |
|
53 | 53 | from rhodecode.lib.vcs.exceptions import ( |
|
54 | 54 | EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError, |
|
55 | 55 | NodeDoesNotExistError) |
|
56 | 56 | |
|
57 | 57 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
58 | 58 | from rhodecode.model.comment import CommentsModel |
|
59 | 59 | from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment, |
|
60 | 60 | Repository, PullRequestVersion) |
|
61 | 61 | from rhodecode.model.forms import PullRequestForm |
|
62 | 62 | from rhodecode.model.meta import Session |
|
63 | 63 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
64 | 64 | |
|
65 | 65 | log = logging.getLogger(__name__) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | class PullrequestsController(BaseRepoController): |
|
69 | 69 | |
|
70 | 70 | def __before__(self): |
|
71 | 71 | super(PullrequestsController, self).__before__() |
|
72 | 72 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
73 | 73 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
74 | 74 | |
|
75 | 75 | @LoginRequired() |
|
76 | 76 | @NotAnonymous() |
|
77 | 77 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
78 | 78 | 'repository.admin') |
|
79 | 79 | @HasAcceptedRepoType('git', 'hg') |
|
80 | 80 | def index(self): |
|
81 | 81 | source_repo = c.rhodecode_db_repo |
|
82 | 82 | |
|
83 | 83 | try: |
|
84 | 84 | source_repo.scm_instance().get_commit() |
|
85 | 85 | except EmptyRepositoryError: |
|
86 | 86 | h.flash(h.literal(_('There are no commits yet')), |
|
87 | 87 | category='warning') |
|
88 | redirect( | 

88 | redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name)) | 
|
89 | 89 | |
|
90 | 90 | commit_id = request.GET.get('commit') |
|
91 | 91 | branch_ref = request.GET.get('branch') |
|
92 | 92 | bookmark_ref = request.GET.get('bookmark') |
|
93 | 93 | |
|
94 | 94 | try: |
|
95 | 95 | source_repo_data = PullRequestModel().generate_repo_data( |
|
96 | 96 | source_repo, commit_id=commit_id, |
|
97 | 97 | branch=branch_ref, bookmark=bookmark_ref) |
|
98 | 98 | except CommitDoesNotExistError as e: |
|
99 | 99 | log.exception(e) |
|
100 | 100 | h.flash(_('Commit does not exist'), 'error') |
|
101 | 101 | redirect(url('pullrequest_home', repo_name=source_repo.repo_name)) |
|
102 | 102 | |
|
103 | 103 | default_target_repo = source_repo |
|
104 | 104 | |
|
105 | 105 | if source_repo.parent: |
|
106 | 106 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
107 | 107 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
108 | 108 | # change default if we have a parent repo |
|
109 | 109 | default_target_repo = source_repo.parent |
|
110 | 110 | |
|
111 | 111 | target_repo_data = PullRequestModel().generate_repo_data( |
|
112 | 112 | default_target_repo) |
|
113 | 113 | |
|
114 | 114 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
115 | 115 | |
|
116 | 116 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
117 | 117 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
118 | 118 | source=source_repo.repo_name, |
|
119 | 119 | source_ref=title_source_ref, |
|
120 | 120 | target=default_target_repo.repo_name |
|
121 | 121 | ) |
|
122 | 122 | |
|
123 | 123 | c.default_repo_data = { |
|
124 | 124 | 'source_repo_name': source_repo.repo_name, |
|
125 | 125 | 'source_refs_json': json.dumps(source_repo_data), |
|
126 | 126 | 'target_repo_name': default_target_repo.repo_name, |
|
127 | 127 | 'target_refs_json': json.dumps(target_repo_data), |
|
128 | 128 | } |
|
129 | 129 | c.default_source_ref = selected_source_ref |
|
130 | 130 | |
|
131 | 131 | return render('/pullrequests/pullrequest.mako') |
|
132 | 132 | |
|
133 | 133 | @LoginRequired() |
|
134 | 134 | @NotAnonymous() |
|
135 | 135 | @XHRRequired() |
|
136 | 136 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
137 | 137 | 'repository.admin') |
|
138 | 138 | @jsonify |
|
139 | 139 | def get_repo_refs(self, repo_name, target_repo_name): |
|
140 | 140 | repo = Repository.get_by_repo_name(target_repo_name) |
|
141 | 141 | if not repo: |
|
142 | 142 | raise HTTPNotFound |
|
143 | 143 | return PullRequestModel().generate_repo_data(repo) |
|
144 | 144 | |
|
145 | 145 | @LoginRequired() |
|
146 | 146 | @NotAnonymous() |
|
147 | 147 | @XHRRequired() |
|
148 | 148 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
149 | 149 | 'repository.admin') |
|
150 | 150 | @jsonify |
|
151 | 151 | def get_repo_destinations(self, repo_name): |
|
152 | 152 | repo = Repository.get_by_repo_name(repo_name) |
|
153 | 153 | if not repo: |
|
154 | 154 | raise HTTPNotFound |
|
155 | 155 | filter_query = request.GET.get('query') |
|
156 | 156 | |
|
157 | 157 | query = Repository.query() \ |
|
158 | 158 | .order_by(func.length(Repository.repo_name)) \ |
|
159 | 159 | .filter(or_( |
|
160 | 160 | Repository.repo_name == repo.repo_name, |
|
161 | 161 | Repository.fork_id == repo.repo_id)) |
|
162 | 162 | |
|
163 | 163 | if filter_query: |
|
164 | 164 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
165 | 165 | query = query.filter( |
|
166 | 166 | Repository.repo_name.ilike(ilike_expression)) |
|
167 | 167 | |
|
168 | 168 | add_parent = False |
|
169 | 169 | if repo.parent: |
|
170 | 170 | if filter_query in repo.parent.repo_name: |
|
171 | 171 | parent_vcs_obj = repo.parent.scm_instance() |
|
172 | 172 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
173 | 173 | add_parent = True |
|
174 | 174 | |
|
175 | 175 | limit = 20 - 1 if add_parent else 20 |
|
176 | 176 | all_repos = query.limit(limit).all() |
|
177 | 177 | if add_parent: |
|
178 | 178 | all_repos += [repo.parent] |
|
179 | 179 | |
|
180 | 180 | repos = [] |
|
181 | 181 | for obj in self.scm_model.get_repos(all_repos): |
|
182 | 182 | repos.append({ |
|
183 | 183 | 'id': obj['name'], |
|
184 | 184 | 'text': obj['name'], |
|
185 | 185 | 'type': 'repo', |
|
186 | 186 | 'obj': obj['dbrepo'] |
|
187 | 187 | }) |
|
188 | 188 | |
|
189 | 189 | data = { |
|
190 | 190 | 'more': False, |
|
191 | 191 | 'results': [{ |
|
192 | 192 | 'text': _('Repositories'), |
|
193 | 193 | 'children': repos |
|
194 | 194 | }] if repos else [] |
|
195 | 195 | } |
|
196 | 196 | return data |
|
197 | 197 | |
|
198 | 198 | @LoginRequired() |
|
199 | 199 | @NotAnonymous() |
|
200 | 200 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
201 | 201 | 'repository.admin') |
|
202 | 202 | @HasAcceptedRepoType('git', 'hg') |
|
203 | 203 | @auth.CSRFRequired() |
|
204 | 204 | def create(self, repo_name): |
|
205 | 205 | repo = Repository.get_by_repo_name(repo_name) |
|
206 | 206 | if not repo: |
|
207 | 207 | raise HTTPNotFound |
|
208 | 208 | |
|
209 | 209 | controls = peppercorn.parse(request.POST.items()) |
|
210 | 210 | |
|
211 | 211 | try: |
|
212 | 212 | _form = PullRequestForm(repo.repo_id)().to_python(controls) |
|
213 | 213 | except formencode.Invalid as errors: |
|
214 | 214 | if errors.error_dict.get('revisions'): |
|
215 | 215 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
216 | 216 | elif errors.error_dict.get('pullrequest_title'): |
|
217 | 217 | msg = _('Pull request requires a title with min. 3 chars') |
|
218 | 218 | else: |
|
219 | 219 | msg = _('Error creating pull request: {}').format(errors) |
|
220 | 220 | log.exception(msg) |
|
221 | 221 | h.flash(msg, 'error') |
|
222 | 222 | |
|
223 | 223 | # would rather just go back to form ... |
|
224 | 224 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
225 | 225 | |
|
226 | 226 | source_repo = _form['source_repo'] |
|
227 | 227 | source_ref = _form['source_ref'] |
|
228 | 228 | target_repo = _form['target_repo'] |
|
229 | 229 | target_ref = _form['target_ref'] |
|
230 | 230 | commit_ids = _form['revisions'][::-1] |
|
231 | 231 | |
|
232 | 232 | # find the ancestor for this pr |
|
233 | 233 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
234 | 234 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
235 | 235 | |
|
236 | 236 | source_scm = source_db_repo.scm_instance() |
|
237 | 237 | target_scm = target_db_repo.scm_instance() |
|
238 | 238 | |
|
239 | 239 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
240 | 240 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
241 | 241 | |
|
242 | 242 | ancestor = source_scm.get_common_ancestor( |
|
243 | 243 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
244 | 244 | |
|
245 | 245 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
246 | 246 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
247 | 247 | |
|
248 | 248 | pullrequest_title = _form['pullrequest_title'] |
|
249 | 249 | title_source_ref = source_ref.split(':', 2)[1] |
|
250 | 250 | if not pullrequest_title: |
|
251 | 251 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
252 | 252 | source=source_repo, |
|
253 | 253 | source_ref=title_source_ref, |
|
254 | 254 | target=target_repo |
|
255 | 255 | ) |
|
256 | 256 | |
|
257 | 257 | description = _form['pullrequest_desc'] |
|
258 | 258 | |
|
259 | 259 | get_default_reviewers_data, validate_default_reviewers = \ |
|
260 | 260 | PullRequestModel().get_reviewer_functions() |
|
261 | 261 | |
|
262 | 262 | # recalculate reviewers logic, to make sure we can validate this |
|
263 | 263 | reviewer_rules = get_default_reviewers_data( |
|
264 | 264 | c.rhodecode_user, source_db_repo, source_commit, target_db_repo, |
|
265 | 265 | target_commit) |
|
266 | 266 | |
|
267 | 267 | reviewers = validate_default_reviewers( |
|
268 | 268 | _form['review_members'], reviewer_rules) |
|
269 | 269 | |
|
270 | 270 | try: |
|
271 | 271 | pull_request = PullRequestModel().create( |
|
272 | 272 | c.rhodecode_user.user_id, source_repo, source_ref, target_repo, |
|
273 | 273 | target_ref, commit_ids, reviewers, pullrequest_title, |
|
274 | 274 | description, reviewer_rules |
|
275 | 275 | ) |
|
276 | 276 | Session().commit() |
|
277 | 277 | h.flash(_('Successfully opened new pull request'), |
|
278 | 278 | category='success') |
|
279 | 279 | except Exception as e: |
|
280 | 280 | msg = _('Error occurred during creation of this pull request.') |
|
281 | 281 | log.exception(msg) |
|
282 | 282 | h.flash(msg, category='error') |
|
283 | 283 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
284 | 284 | |
|
285 | 285 | return redirect(url('pullrequest_show', repo_name=target_repo, |
|
286 | 286 | pull_request_id=pull_request.pull_request_id)) |
|
287 | 287 | |
|
288 | 288 | @LoginRequired() |
|
289 | 289 | @NotAnonymous() |
|
290 | 290 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
291 | 291 | 'repository.admin') |
|
292 | 292 | @auth.CSRFRequired() |
|
293 | 293 | @jsonify |
|
294 | 294 | def update(self, repo_name, pull_request_id): |
|
295 | 295 | pull_request_id = safe_int(pull_request_id) |
|
296 | 296 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
297 | 297 | # only owner or admin can update it |
|
298 | 298 | allowed_to_update = PullRequestModel().check_user_update( |
|
299 | 299 | pull_request, c.rhodecode_user) |
|
300 | 300 | if allowed_to_update: |
|
301 | 301 | controls = peppercorn.parse(request.POST.items()) |
|
302 | 302 | |
|
303 | 303 | if 'review_members' in controls: |
|
304 | 304 | self._update_reviewers( |
|
305 | 305 | pull_request_id, controls['review_members'], |
|
306 | 306 | pull_request.reviewer_data) |
|
307 | 307 | elif str2bool(request.POST.get('update_commits', 'false')): |
|
308 | 308 | self._update_commits(pull_request) |
|
309 | 309 | elif str2bool(request.POST.get('close_pull_request', 'false')): |
|
310 | 310 | self._reject_close(pull_request) |
|
311 | 311 | elif str2bool(request.POST.get('edit_pull_request', 'false')): |
|
312 | 312 | self._edit_pull_request(pull_request) |
|
313 | 313 | else: |
|
314 | 314 | raise HTTPBadRequest() |
|
315 | 315 | return True |
|
316 | 316 | raise HTTPForbidden() |
|
317 | 317 | |
|
318 | 318 | def _edit_pull_request(self, pull_request): |
|
319 | 319 | try: |
|
320 | 320 | PullRequestModel().edit( |
|
321 | 321 | pull_request, request.POST.get('title'), |
|
322 | 322 | request.POST.get('description')) |
|
323 | 323 | except ValueError: |
|
324 | 324 | msg = _(u'Cannot update closed pull requests.') |
|
325 | 325 | h.flash(msg, category='error') |
|
326 | 326 | return |
|
327 | 327 | else: |
|
328 | 328 | Session().commit() |
|
329 | 329 | |
|
330 | 330 | msg = _(u'Pull request title & description updated.') |
|
331 | 331 | h.flash(msg, category='success') |
|
332 | 332 | return |
|
333 | 333 | |
|
334 | 334 | def _update_commits(self, pull_request): |
|
335 | 335 | resp = PullRequestModel().update_commits(pull_request) |
|
336 | 336 | |
|
337 | 337 | if resp.executed: |
|
338 | 338 | |
|
339 | 339 | if resp.target_changed and resp.source_changed: |
|
340 | 340 | changed = 'target and source repositories' |
|
341 | 341 | elif resp.target_changed and not resp.source_changed: |
|
342 | 342 | changed = 'target repository' |
|
343 | 343 | elif not resp.target_changed and resp.source_changed: |
|
344 | 344 | changed = 'source repository' |
|
345 | 345 | else: |
|
346 | 346 | changed = 'nothing' |
|
347 | 347 | |
|
348 | 348 | msg = _( |
|
349 | 349 | u'Pull request updated to "{source_commit_id}" with ' |
|
350 | 350 | u'{count_added} added, {count_removed} removed commits. ' |
|
351 | 351 | u'Source of changes: {change_source}') |
|
352 | 352 | msg = msg.format( |
|
353 | 353 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
354 | 354 | count_added=len(resp.changes.added), |
|
355 | 355 | count_removed=len(resp.changes.removed), |
|
356 | 356 | change_source=changed) |
|
357 | 357 | h.flash(msg, category='success') |
|
358 | 358 | |
|
359 | 359 | registry = get_current_registry() |
|
360 | 360 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
361 | 361 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
362 | 362 | if channelstream_config.get('enabled'): |
|
363 | 363 | message = msg + ( |
|
364 | 364 | ' - <a onclick="window.location.reload()">' |
|
365 | 365 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
366 | 366 | channel = '/repo${}$/pr/{}'.format( |
|
367 | 367 | pull_request.target_repo.repo_name, |
|
368 | 368 | pull_request.pull_request_id |
|
369 | 369 | ) |
|
370 | 370 | payload = { |
|
371 | 371 | 'type': 'message', |
|
372 | 372 | 'user': 'system', |
|
373 | 373 | 'exclude_users': [request.user.username], |
|
374 | 374 | 'channel': channel, |
|
375 | 375 | 'message': { |
|
376 | 376 | 'message': message, |
|
377 | 377 | 'level': 'success', |
|
378 | 378 | 'topic': '/notifications' |
|
379 | 379 | } |
|
380 | 380 | } |
|
381 | 381 | channelstream_request( |
|
382 | 382 | channelstream_config, [payload], '/message', |
|
383 | 383 | raise_exc=False) |
|
384 | 384 | else: |
|
385 | 385 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
386 | 386 | warning_reasons = [ |
|
387 | 387 | UpdateFailureReason.NO_CHANGE, |
|
388 | 388 | UpdateFailureReason.WRONG_REF_TYPE, |
|
389 | 389 | ] |
|
390 | 390 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
391 | 391 | h.flash(msg, category=category) |
|
392 | 392 | |
|
393 | 393 | @auth.CSRFRequired() |
|
394 | 394 | @LoginRequired() |
|
395 | 395 | @NotAnonymous() |
|
396 | 396 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
397 | 397 | 'repository.admin') |
|
398 | 398 | def merge(self, repo_name, pull_request_id): |
|
399 | 399 | """ |
|
400 | 400 | POST /{repo_name}/pull-request/{pull_request_id} |
|
401 | 401 | |
|
402 | 402 | Merge will perform a server-side merge of the specified |
|
403 | 403 | pull request, if the pull request is approved and mergeable. |
|
404 | 404 | After successful merging, the pull request is automatically |
|
405 | 405 | closed, with a relevant comment. |
|
406 | 406 | """ |
|
407 | 407 | pull_request_id = safe_int(pull_request_id) |
|
408 | 408 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
409 | 409 | user = c.rhodecode_user |
|
410 | 410 | |
|
411 | 411 | check = MergeCheck.validate(pull_request, user) |
|
412 | 412 | merge_possible = not check.failed |
|
413 | 413 | |
|
414 | 414 | for err_type, error_msg in check.errors: |
|
415 | 415 | h.flash(error_msg, category=err_type) |
|
416 | 416 | |
|
417 | 417 | if merge_possible: |
|
418 | 418 | log.debug("Pre-conditions checked, trying to merge.") |
|
419 | 419 | extras = vcs_operation_context( |
|
420 | 420 | request.environ, repo_name=pull_request.target_repo.repo_name, |
|
421 | 421 | username=user.username, action='push', |
|
422 | 422 | scm=pull_request.target_repo.repo_type) |
|
423 | 423 | self._merge_pull_request(pull_request, user, extras) |
|
424 | 424 | |
|
425 | 425 | return redirect(url( |
|
426 | 426 | 'pullrequest_show', |
|
427 | 427 | repo_name=pull_request.target_repo.repo_name, |
|
428 | 428 | pull_request_id=pull_request.pull_request_id)) |
|
429 | 429 | |
|
430 | 430 | def _merge_pull_request(self, pull_request, user, extras): |
|
431 | 431 | merge_resp = PullRequestModel().merge( |
|
432 | 432 | pull_request, user, extras=extras) |
|
433 | 433 | |
|
434 | 434 | if merge_resp.executed: |
|
435 | 435 | log.debug("The merge was successful, closing the pull request.") |
|
436 | 436 | PullRequestModel().close_pull_request( |
|
437 | 437 | pull_request.pull_request_id, user) |
|
438 | 438 | Session().commit() |
|
439 | 439 | msg = _('Pull request was successfully merged and closed.') |
|
440 | 440 | h.flash(msg, category='success') |
|
441 | 441 | else: |
|
442 | 442 | log.debug( |
|
443 | 443 | "The merge was not successful. Merge response: %s", |
|
444 | 444 | merge_resp) |
|
445 | 445 | msg = PullRequestModel().merge_status_message( |
|
446 | 446 | merge_resp.failure_reason) |
|
447 | 447 | h.flash(msg, category='error') |
|
448 | 448 | |
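
For orientation, here is a minimal client-side sketch of driving the merge endpoint described in the docstring above. Only the route shape and the csrf_token form field come from this code; the base URL, repository name, token value and session cookie name are placeholders (assumptions).

    import requests

    base_url = 'https://code.example.com'               # assumed server address
    repo_name = 'my-group/my-repo'                      # assumed repository
    pull_request_id = 42                                 # assumed pull request id
    csrf_token = 'token-from-a-rendered-form'            # required by @auth.CSRFRequired()
    cookies = {'rc_session_id': 'session-from-login'}    # assumed session cookie name

    # POST /{repo_name}/pull-request/{pull_request_id} merges an approved, mergeable PR
    resp = requests.post(
        '%s/%s/pull-request/%s' % (base_url, repo_name, pull_request_id),
        data={'csrf_token': csrf_token},
        cookies=cookies,
        allow_redirects=False)   # the view redirects back to the pull request page
    print(resp.status_code)      # expect a redirect back to the pull request page
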
|
449 | 449 | def _update_reviewers(self, pull_request_id, review_members, reviewer_rules): |
|
450 | 450 | |
|
451 | 451 | get_default_reviewers_data, validate_default_reviewers = \ |
|
452 | 452 | PullRequestModel().get_reviewer_functions() |
|
453 | 453 | |
|
454 | 454 | try: |
|
455 | 455 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
456 | 456 | except ValueError as e: |
|
457 | 457 | log.error('Reviewers Validation:{}'.format(e)) |
|
458 | 458 | h.flash(e, category='error') |
|
459 | 459 | return |
|
460 | 460 | |
|
461 | 461 | PullRequestModel().update_reviewers(pull_request_id, reviewers) |
|
462 | 462 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
463 | 463 | Session().commit() |
|
464 | 464 | |
|
465 | 465 | def _reject_close(self, pull_request): |
|
466 | 466 | if pull_request.is_closed(): |
|
467 | 467 | raise HTTPForbidden() |
|
468 | 468 | |
|
469 | 469 | PullRequestModel().close_pull_request_with_comment( |
|
470 | 470 | pull_request, c.rhodecode_user, c.rhodecode_db_repo) |
|
471 | 471 | Session().commit() |
|
472 | 472 | |
|
473 | 473 | @LoginRequired() |
|
474 | 474 | @NotAnonymous() |
|
475 | 475 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
476 | 476 | 'repository.admin') |
|
477 | 477 | @auth.CSRFRequired() |
|
478 | 478 | @jsonify |
|
479 | 479 | def delete(self, repo_name, pull_request_id): |
|
480 | 480 | pull_request_id = safe_int(pull_request_id) |
|
481 | 481 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
482 | 482 | |
|
483 | 483 | pr_closed = pull_request.is_closed() |
|
484 | 484 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
485 | 485 | pull_request, c.rhodecode_user) and not pr_closed |
|
486 | 486 | |
|
487 | 487 | # only owner can delete it ! |
|
488 | 488 | if allowed_to_delete: |
|
489 | 489 | PullRequestModel().delete(pull_request) |
|
490 | 490 | Session().commit() |
|
491 | 491 | h.flash(_('Successfully deleted pull request'), |
|
492 | 492 | category='success') |
|
493 | 493 | return redirect(url('my_account_pullrequests')) |
|
494 | 494 | |
|
495 | 495 | h.flash(_('You are not allowed to delete this pull request'), |
|
496 | 496 | category='error') |
|
497 | 497 | raise HTTPForbidden() |
|
498 | 498 | |
|
499 | 499 | def _get_pr_version(self, pull_request_id, version=None): |
|
500 | 500 | pull_request_id = safe_int(pull_request_id) |
|
501 | 501 | at_version = None |
|
502 | 502 | |
|
503 | 503 | if version and version == 'latest': |
|
504 | 504 | pull_request_ver = PullRequest.get(pull_request_id) |
|
505 | 505 | pull_request_obj = pull_request_ver |
|
506 | 506 | _org_pull_request_obj = pull_request_obj |
|
507 | 507 | at_version = 'latest' |
|
508 | 508 | elif version: |
|
509 | 509 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
510 | 510 | pull_request_obj = pull_request_ver |
|
511 | 511 | _org_pull_request_obj = pull_request_ver.pull_request |
|
512 | 512 | at_version = pull_request_ver.pull_request_version_id |
|
513 | 513 | else: |
|
514 | 514 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
515 | 515 | pull_request_id) |
|
516 | 516 | |
|
517 | 517 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
518 | 518 | pull_request_obj, _org_pull_request_obj) |
|
519 | 519 | |
|
520 | 520 | return _org_pull_request_obj, pull_request_obj, \ |
|
521 | 521 | pull_request_display_obj, at_version |
|
522 | 522 | |
|
523 | 523 | def _get_diffset( |
|
524 | 524 | self, source_repo, source_ref_id, target_ref_id, target_commit, |
|
525 | 525 | source_commit, diff_limit, file_limit, display_inline_comments): |
|
526 | 526 | vcs_diff = PullRequestModel().get_diff( |
|
527 | 527 | source_repo, source_ref_id, target_ref_id) |
|
528 | 528 | |
|
529 | 529 | diff_processor = diffs.DiffProcessor( |
|
530 | 530 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
531 | 531 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
532 | 532 | |
|
533 | 533 | _parsed = diff_processor.prepare() |
|
534 | 534 | |
|
535 | 535 | def _node_getter(commit): |
|
536 | 536 | def get_node(fname): |
|
537 | 537 | try: |
|
538 | 538 | return commit.get_node(fname) |
|
539 | 539 | except NodeDoesNotExistError: |
|
540 | 540 | return None |
|
541 | 541 | |
|
542 | 542 | return get_node |
|
543 | 543 | |
|
544 | 544 | diffset = codeblocks.DiffSet( |
|
545 | 545 | repo_name=c.repo_name, |
|
546 | 546 | source_repo_name=c.source_repo.repo_name, |
|
547 | 547 | source_node_getter=_node_getter(target_commit), |
|
548 | 548 | target_node_getter=_node_getter(source_commit), |
|
549 | 549 | comments=display_inline_comments |
|
550 | 550 | ) |
|
551 | 551 | diffset = diffset.render_patchset( |
|
552 | 552 | _parsed, target_commit.raw_id, source_commit.raw_id) |
|
553 | 553 | |
|
554 | 554 | return diffset |
|
555 | 555 | |
|
556 | 556 | @LoginRequired() |
|
557 | 557 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
558 | 558 | 'repository.admin') |
|
559 | 559 | def show(self, repo_name, pull_request_id): |
|
560 | 560 | pull_request_id = safe_int(pull_request_id) |
|
561 | 561 | version = request.GET.get('version') |
|
562 | 562 | from_version = request.GET.get('from_version') or version |
|
563 | 563 | merge_checks = request.GET.get('merge_checks') |
|
564 | 564 | c.fulldiff = str2bool(request.GET.get('fulldiff')) |
|
565 | 565 | |
|
566 | 566 | (pull_request_latest, |
|
567 | 567 | pull_request_at_ver, |
|
568 | 568 | pull_request_display_obj, |
|
569 | 569 | at_version) = self._get_pr_version( |
|
570 | 570 | pull_request_id, version=version) |
|
571 | 571 | pr_closed = pull_request_latest.is_closed() |
|
572 | 572 | |
|
573 | 573 | if pr_closed and (version or from_version): |
|
574 | 574 | # do not allow browsing versions |
|
575 | 575 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
576 | 576 | pull_request_id=pull_request_id)) |
|
577 | 577 | |
|
578 | 578 | versions = pull_request_display_obj.versions() |
|
579 | 579 | |
|
580 | 580 | c.at_version = at_version |
|
581 | 581 | c.at_version_num = (at_version |
|
582 | 582 | if at_version and at_version != 'latest' |
|
583 | 583 | else None) |
|
584 | 584 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
585 | 585 | c.at_version_num, versions) |
|
586 | 586 | |
|
587 | 587 | (prev_pull_request_latest, |
|
588 | 588 | prev_pull_request_at_ver, |
|
589 | 589 | prev_pull_request_display_obj, |
|
590 | 590 | prev_at_version) = self._get_pr_version( |
|
591 | 591 | pull_request_id, version=from_version) |
|
592 | 592 | |
|
593 | 593 | c.from_version = prev_at_version |
|
594 | 594 | c.from_version_num = (prev_at_version |
|
595 | 595 | if prev_at_version and prev_at_version != 'latest' |
|
596 | 596 | else None) |
|
597 | 597 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
598 | 598 | c.from_version_num, versions) |
|
599 | 599 | |
|
600 | 600 | # define if we're in COMPARE mode or VIEW at version mode |
|
601 | 601 | compare = at_version != prev_at_version |
|
602 | 602 | |
|
603 | 603 | # the repo_name of the repository this pull request was opened against, |

604 | 604 | # i.e. the target_repo must match |
|
605 | 605 | if repo_name != pull_request_at_ver.target_repo.repo_name: |
|
606 | 606 | raise HTTPNotFound |
|
607 | 607 | |
|
608 | 608 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
609 | 609 | pull_request_at_ver) |
|
610 | 610 | |
|
611 | 611 | c.pull_request = pull_request_display_obj |
|
612 | 612 | c.pull_request_latest = pull_request_latest |
|
613 | 613 | |
|
614 | 614 | if compare or (at_version and not at_version == 'latest'): |
|
615 | 615 | c.allowed_to_change_status = False |
|
616 | 616 | c.allowed_to_update = False |
|
617 | 617 | c.allowed_to_merge = False |
|
618 | 618 | c.allowed_to_delete = False |
|
619 | 619 | c.allowed_to_comment = False |
|
620 | 620 | c.allowed_to_close = False |
|
621 | 621 | else: |
|
622 | 622 | can_change_status = PullRequestModel().check_user_change_status( |
|
623 | 623 | pull_request_at_ver, c.rhodecode_user) |
|
624 | 624 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
625 | 625 | |
|
626 | 626 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
627 | 627 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
628 | 628 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
629 | 629 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
630 | 630 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
631 | 631 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
632 | 632 | c.allowed_to_comment = not pr_closed |
|
633 | 633 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
634 | 634 | |
|
635 | 635 | c.forbid_adding_reviewers = False |
|
636 | 636 | c.forbid_author_to_review = False |
|
637 | 637 | |
|
638 | 638 | if pull_request_latest.reviewer_data and \ |
|
639 | 639 | 'rules' in pull_request_latest.reviewer_data: |
|
640 | 640 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
641 | 641 | try: |
|
642 | 642 | c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers') |
|
643 | 643 | c.forbid_author_to_review = rules.get('forbid_author_to_review') |
|
644 | 644 | except Exception: |
|
645 | 645 | pass |
|
646 | 646 | |
|
647 | 647 | # check merge capabilities |
|
648 | 648 | _merge_check = MergeCheck.validate( |
|
649 | 649 | pull_request_latest, user=c.rhodecode_user) |
|
650 | 650 | c.pr_merge_errors = _merge_check.error_details |
|
651 | 651 | c.pr_merge_possible = not _merge_check.failed |
|
652 | 652 | c.pr_merge_message = _merge_check.merge_msg |
|
653 | 653 | |
|
654 | 654 | c.pull_request_review_status = _merge_check.review_status |
|
655 | 655 | if merge_checks: |
|
656 | 656 | return render('/pullrequests/pullrequest_merge_checks.mako') |
|
657 | 657 | |
|
658 | 658 | comments_model = CommentsModel() |
|
659 | 659 | |
|
660 | 660 | # reviewers and statuses |
|
661 | 661 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
662 | 662 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
663 | 663 | |
|
664 | 664 | # GENERAL COMMENTS with versions # |
|
665 | 665 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
666 | 666 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
667 | 667 | general_comments = q |
|
668 | 668 | |
|
669 | 669 | # pick comments we want to render at current version |
|
670 | 670 | c.comment_versions = comments_model.aggregate_comments( |
|
671 | 671 | general_comments, versions, c.at_version_num) |
|
672 | 672 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
673 | 673 | |
|
674 | 674 | # INLINE COMMENTS with versions # |
|
675 | 675 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
676 | 676 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
677 | 677 | inline_comments = q |
|
678 | 678 | |
|
679 | 679 | c.inline_versions = comments_model.aggregate_comments( |
|
680 | 680 | inline_comments, versions, c.at_version_num, inline=True) |
|
681 | 681 | |
|
682 | 682 | # inject latest version |
|
683 | 683 | latest_ver = PullRequest.get_pr_display_object( |
|
684 | 684 | pull_request_latest, pull_request_latest) |
|
685 | 685 | |
|
686 | 686 | c.versions = versions + [latest_ver] |
|
687 | 687 | |
|
688 | 688 | # if we use version, then do not show later comments |
|
689 | 689 | # than current version |
|
690 | 690 | display_inline_comments = collections.defaultdict( |
|
691 | 691 | lambda: collections.defaultdict(list)) |
|
692 | 692 | for co in inline_comments: |
|
693 | 693 | if c.at_version_num: |
|
694 | 694 | # pick comments that are at least UPTO given version, so we |
|
695 | 695 | # don't render comments for higher version |
|
696 | 696 | should_render = co.pull_request_version_id and \ |
|
697 | 697 | co.pull_request_version_id <= c.at_version_num |
|
698 | 698 | else: |
|
699 | 699 | # showing all, for 'latest' |
|
700 | 700 | should_render = True |
|
701 | 701 | |
|
702 | 702 | if should_render: |
|
703 | 703 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
704 | 704 | |
|
705 | 705 | # load diff data into template context, if we use compare mode then |
|
706 | 706 | # diff is calculated based on changes between versions of PR |
|
707 | 707 | |
|
708 | 708 | source_repo = pull_request_at_ver.source_repo |
|
709 | 709 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
710 | 710 | |
|
711 | 711 | target_repo = pull_request_at_ver.target_repo |
|
712 | 712 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
713 | 713 | |
|
714 | 714 | if compare: |
|
715 | 715 | # in compare switch the diff base to latest commit from prev version |
|
716 | 716 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
717 | 717 | |
|
718 | 718 | # despite opening commits for bookmarks/branches/tags, we always |
|
719 | 719 | # convert this to rev to prevent changes after bookmark or branch change |
|
720 | 720 | c.source_ref_type = 'rev' |
|
721 | 721 | c.source_ref = source_ref_id |
|
722 | 722 | |
|
723 | 723 | c.target_ref_type = 'rev' |
|
724 | 724 | c.target_ref = target_ref_id |
|
725 | 725 | |
|
726 | 726 | c.source_repo = source_repo |
|
727 | 727 | c.target_repo = target_repo |
|
728 | 728 | |
|
729 | 729 | # diff_limit is the old behavior, will cut off the whole diff |
|
730 | 730 | # if the limit is applied otherwise will just hide the |
|
731 | 731 | # big files from the front-end |
|
732 | 732 | diff_limit = self.cut_off_limit_diff |
|
733 | 733 | file_limit = self.cut_off_limit_file |
|
734 | 734 | |
|
735 | 735 | c.commit_ranges = [] |
|
736 | 736 | source_commit = EmptyCommit() |
|
737 | 737 | target_commit = EmptyCommit() |
|
738 | 738 | c.missing_requirements = False |
|
739 | 739 | |
|
740 | 740 | source_scm = source_repo.scm_instance() |
|
741 | 741 | target_scm = target_repo.scm_instance() |
|
742 | 742 | |
|
743 | 743 | # try first shadow repo, fallback to regular repo |
|
744 | 744 | try: |
|
745 | 745 | commits_source_repo = pull_request_latest.get_shadow_repo() |
|
746 | 746 | except Exception: |
|
747 | 747 | log.debug('Failed to get shadow repo', exc_info=True) |
|
748 | 748 | commits_source_repo = source_scm |
|
749 | 749 | |
|
750 | 750 | c.commits_source_repo = commits_source_repo |
|
751 | 751 | commit_cache = {} |
|
752 | 752 | try: |
|
753 | 753 | pre_load = ["author", "branch", "date", "message"] |
|
754 | 754 | show_revs = pull_request_at_ver.revisions |
|
755 | 755 | for rev in show_revs: |
|
756 | 756 | comm = commits_source_repo.get_commit( |
|
757 | 757 | commit_id=rev, pre_load=pre_load) |
|
758 | 758 | c.commit_ranges.append(comm) |
|
759 | 759 | commit_cache[comm.raw_id] = comm |
|
760 | 760 | |
|
761 | 761 | # Order here matters, we first need to get target, and then |
|
762 | 762 | # the source |
|
763 | 763 | target_commit = commits_source_repo.get_commit( |
|
764 | 764 | commit_id=safe_str(target_ref_id)) |
|
765 | 765 | |
|
766 | 766 | source_commit = commits_source_repo.get_commit( |
|
767 | 767 | commit_id=safe_str(source_ref_id)) |
|
768 | 768 | |
|
769 | 769 | except CommitDoesNotExistError: |
|
770 | 770 | log.warning( |
|
771 | 771 | 'Failed to get commit from `{}` repo'.format( |
|
772 | 772 | commits_source_repo), exc_info=True) |
|
773 | 773 | except RepositoryRequirementError: |
|
774 | 774 | log.warning( |
|
775 | 775 | 'Failed to get all required data from repo', exc_info=True) |
|
776 | 776 | c.missing_requirements = True |
|
777 | 777 | |
|
778 | 778 | c.ancestor = None # set it to None, to hide it from PR view |
|
779 | 779 | |
|
780 | 780 | try: |
|
781 | 781 | ancestor_id = source_scm.get_common_ancestor( |
|
782 | 782 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
783 | 783 | c.ancestor_commit = source_scm.get_commit(ancestor_id) |
|
784 | 784 | except Exception: |
|
785 | 785 | c.ancestor_commit = None |
|
786 | 786 | |
|
787 | 787 | c.statuses = source_repo.statuses( |
|
788 | 788 | [x.raw_id for x in c.commit_ranges]) |
|
789 | 789 | |
|
790 | 790 | # auto collapse if we have more than limit |
|
791 | 791 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
792 | 792 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
793 | 793 | c.compare_mode = compare |
|
794 | 794 | |
|
795 | 795 | c.missing_commits = False |
|
796 | 796 | if (c.missing_requirements or isinstance(source_commit, EmptyCommit) |
|
797 | 797 | or source_commit == target_commit): |
|
798 | 798 | |
|
799 | 799 | c.missing_commits = True |
|
800 | 800 | else: |
|
801 | 801 | |
|
802 | 802 | c.diffset = self._get_diffset( |
|
803 | 803 | commits_source_repo, source_ref_id, target_ref_id, |
|
804 | 804 | target_commit, source_commit, |
|
805 | 805 | diff_limit, file_limit, display_inline_comments) |
|
806 | 806 | |
|
807 | 807 | c.limited_diff = c.diffset.limited_diff |
|
808 | 808 | |
|
809 | 809 | # calculate removed files that are bound to comments |
|
810 | 810 | comment_deleted_files = [ |
|
811 | 811 | fname for fname in display_inline_comments |
|
812 | 812 | if fname not in c.diffset.file_stats] |
|
813 | 813 | |
|
814 | 814 | c.deleted_files_comments = collections.defaultdict(dict) |
|
815 | 815 | for fname, per_line_comments in display_inline_comments.items(): |
|
816 | 816 | if fname in comment_deleted_files: |
|
817 | 817 | c.deleted_files_comments[fname]['stats'] = 0 |
|
818 | 818 | c.deleted_files_comments[fname]['comments'] = list() |
|
819 | 819 | for lno, comments in per_line_comments.items(): |
|
820 | 820 | c.deleted_files_comments[fname]['comments'].extend( |
|
821 | 821 | comments) |
|
822 | 822 | |
|
823 | 823 | # this is a hack to properly display links, when creating PR, the |
|
824 | 824 | # compare view and others use different notation, and |
|
825 | 825 | # compare_commits.mako renders links based on the target_repo. |
|
826 | 826 | # We need to swap that here to generate it properly on the html side |
|
827 | 827 | c.target_repo = c.source_repo |
|
828 | 828 | |
|
829 | 829 | c.commit_statuses = ChangesetStatus.STATUSES |
|
830 | 830 | |
|
831 | 831 | c.show_version_changes = not pr_closed |
|
832 | 832 | if c.show_version_changes: |
|
833 | 833 | cur_obj = pull_request_at_ver |
|
834 | 834 | prev_obj = prev_pull_request_at_ver |
|
835 | 835 | |
|
836 | 836 | old_commit_ids = prev_obj.revisions |
|
837 | 837 | new_commit_ids = cur_obj.revisions |
|
838 | 838 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
839 | 839 | old_commit_ids, new_commit_ids) |
|
840 | 840 | c.commit_changes_summary = commit_changes |
|
841 | 841 | |
|
842 | 842 | # calculate the diff for commits between versions |
|
843 | 843 | c.commit_changes = [] |
|
844 | 844 | mark = lambda cs, fw: list( |
|
845 | 845 | h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
846 | 846 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
847 | 847 | + mark(commit_changes.removed, 'r') \ |
|
848 | 848 | + mark(commit_changes.common, 'c'): |
|
849 | 849 | |
|
850 | 850 | if raw_id in commit_cache: |
|
851 | 851 | commit = commit_cache[raw_id] |
|
852 | 852 | else: |
|
853 | 853 | try: |
|
854 | 854 | commit = commits_source_repo.get_commit(raw_id) |
|
855 | 855 | except CommitDoesNotExistError: |
|
856 | 856 | # in case we fail extracting still use "dummy" commit |
|
857 | 857 | # for display in commit diff |
|
858 | 858 | commit = h.AttributeDict( |
|
859 | 859 | {'raw_id': raw_id, |
|
860 | 860 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
861 | 861 | c.commit_changes.append([c_type, commit]) |
|
862 | 862 | |
|
863 | 863 | # current user review statuses for each version |
|
864 | 864 | c.review_versions = {} |
|
865 | 865 | if c.rhodecode_user.user_id in allowed_reviewers: |
|
866 | 866 | for co in general_comments: |
|
867 | 867 | if co.author.user_id == c.rhodecode_user.user_id: |
|
868 | 868 | # each comment has a status change |
|
869 | 869 | status = co.status_change |
|
870 | 870 | if status: |
|
871 | 871 | _ver_pr = status[0].comment.pull_request_version_id |
|
872 | 872 | c.review_versions[_ver_pr] = status[0] |
|
873 | 873 | |
|
874 | 874 | return render('/pullrequests/pullrequest_show.mako') |
|
875 | 875 | |
|
876 | 876 | @LoginRequired() |
|
877 | 877 | @NotAnonymous() |
|
878 | 878 | @HasRepoPermissionAnyDecorator( |
|
879 | 879 | 'repository.read', 'repository.write', 'repository.admin') |
|
880 | 880 | @auth.CSRFRequired() |
|
881 | 881 | @jsonify |
|
882 | 882 | def comment(self, repo_name, pull_request_id): |
|
883 | 883 | pull_request_id = safe_int(pull_request_id) |
|
884 | 884 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
885 | 885 | if pull_request.is_closed(): |
|
886 | 886 | raise HTTPForbidden() |
|
887 | 887 | |
|
888 | 888 | status = request.POST.get('changeset_status', None) |
|
889 | 889 | text = request.POST.get('text') |
|
890 | 890 | comment_type = request.POST.get('comment_type') |
|
891 | 891 | resolves_comment_id = request.POST.get('resolves_comment_id', None) |
|
892 | 892 | close_pull_request = request.POST.get('close_pull_request') |
|
893 | 893 | |
|
894 | 894 | close_pr = False |
|
895 | 895 | # only owner or admin or person with write permissions |
|
896 | 896 | allowed_to_close = PullRequestModel().check_user_update( |
|
897 | 897 | pull_request, c.rhodecode_user) |
|
898 | 898 | |
|
899 | 899 | if close_pull_request and allowed_to_close: |
|
900 | 900 | close_pr = True |
|
901 | 901 | pull_request_review_status = pull_request.calculated_review_status() |
|
902 | 902 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
903 | 903 | # approved only if we have voting consent |
|
904 | 904 | status = ChangesetStatus.STATUS_APPROVED |
|
905 | 905 | else: |
|
906 | 906 | status = ChangesetStatus.STATUS_REJECTED |
|
907 | 907 | |
|
908 | 908 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
909 | 909 | pull_request, c.rhodecode_user) |
|
910 | 910 | |
|
911 | 911 | if status and allowed_to_change_status: |
|
912 | 912 | message = (_('Status change %(transition_icon)s %(status)s') |
|
913 | 913 | % {'transition_icon': '>', |
|
914 | 914 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
915 | 915 | if close_pr: |
|
916 | 916 | message = _('Closing with') + ' ' + message |
|
917 | 917 | text = text or message |
|
918 | 918 | comm = CommentsModel().create( |
|
919 | 919 | text=text, |
|
920 | 920 | repo=c.rhodecode_db_repo.repo_id, |
|
921 | 921 | user=c.rhodecode_user.user_id, |
|
922 | 922 | pull_request=pull_request_id, |
|
923 | 923 | f_path=request.POST.get('f_path'), |
|
924 | 924 | line_no=request.POST.get('line'), |
|
925 | 925 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
926 | 926 | if status and allowed_to_change_status else None), |
|
927 | 927 | status_change_type=(status |
|
928 | 928 | if status and allowed_to_change_status else None), |
|
929 | 929 | closing_pr=close_pr, |
|
930 | 930 | comment_type=comment_type, |
|
931 | 931 | resolves_comment_id=resolves_comment_id |
|
932 | 932 | ) |
|
933 | 933 | |
|
934 | 934 | if allowed_to_change_status: |
|
935 | 935 | old_calculated_status = pull_request.calculated_review_status() |
|
936 | 936 | # get status if set ! |
|
937 | 937 | if status: |
|
938 | 938 | ChangesetStatusModel().set_status( |
|
939 | 939 | c.rhodecode_db_repo.repo_id, |
|
940 | 940 | status, |
|
941 | 941 | c.rhodecode_user.user_id, |
|
942 | 942 | comm, |
|
943 | 943 | pull_request=pull_request_id |
|
944 | 944 | ) |
|
945 | 945 | |
|
946 | 946 | Session().flush() |
|
947 | 947 | events.trigger(events.PullRequestCommentEvent(pull_request, comm)) |
|
948 | 948 | # we now calculate the status of pull request, and based on that |
|
949 | 949 | # calculation we set the commits status |
|
950 | 950 | calculated_status = pull_request.calculated_review_status() |
|
951 | 951 | if old_calculated_status != calculated_status: |
|
952 | 952 | PullRequestModel()._trigger_pull_request_hook( |
|
953 | 953 | pull_request, c.rhodecode_user, 'review_status_change') |
|
954 | 954 | |
|
955 | 955 | calculated_status_lbl = ChangesetStatus.get_status_lbl( |
|
956 | 956 | calculated_status) |
|
957 | 957 | |
|
958 | 958 | if close_pr: |
|
959 | 959 | status_completed = ( |
|
960 | 960 | calculated_status in [ChangesetStatus.STATUS_APPROVED, |
|
961 | 961 | ChangesetStatus.STATUS_REJECTED]) |
|
962 | 962 | if close_pull_request or status_completed: |
|
963 | 963 | PullRequestModel().close_pull_request( |
|
964 | 964 | pull_request_id, c.rhodecode_user) |
|
965 | 965 | else: |
|
966 | 966 | h.flash(_('Closing a pull request with a status other than ' |
|
967 | 967 | 'rejected or approved is forbidden. ' |
|
968 | 968 | 'Calculated status from all reviewers ' |
|
969 | 969 | 'is currently: %s') % calculated_status_lbl, |
|
970 | 970 | category='warning') |
|
971 | 971 | |
|
972 | 972 | Session().commit() |
|
973 | 973 | |
|
974 | 974 | if not request.is_xhr: |
|
975 | 975 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
976 | 976 | pull_request_id=pull_request_id)) |
|
977 | 977 | |
|
978 | 978 | data = { |
|
979 | 979 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
980 | 980 | } |
|
981 | 981 | if comm: |
|
982 | 982 | c.co = comm |
|
983 | 983 | c.inline_comment = True if comm.line_no else False |
|
984 | 984 | data.update(comm.get_dict()) |
|
985 | 985 | data.update({'rendered_text': |
|
986 | 986 | render('changeset/changeset_comment_block.mako')}) |
|
987 | 987 | |
|
988 | 988 | return data |
|
989 | 989 | |
|
990 | 990 | @LoginRequired() |
|
991 | 991 | @NotAnonymous() |
|
992 | 992 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
993 | 993 | 'repository.admin') |
|
994 | 994 | @auth.CSRFRequired() |
|
995 | 995 | @jsonify |
|
996 | 996 | def delete_comment(self, repo_name, comment_id): |
|
997 | 997 | return self._delete_comment(comment_id) |
|
998 | 998 | |
|
999 | 999 | def _delete_comment(self, comment_id): |
|
1000 | 1000 | comment_id = safe_int(comment_id) |
|
1001 | 1001 | co = ChangesetComment.get_or_404(comment_id) |
|
1002 | 1002 | if co.pull_request.is_closed(): |
|
1003 | 1003 | # don't allow deleting comments on closed pull request |
|
1004 | 1004 | raise HTTPForbidden() |
|
1005 | 1005 | |
|
1006 | 1006 | is_owner = co.author.user_id == c.rhodecode_user.user_id |
|
1007 | 1007 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
1008 | 1008 | if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner: |
|
1009 | 1009 | old_calculated_status = co.pull_request.calculated_review_status() |
|
1010 | 1010 | CommentsModel().delete(comment=co) |
|
1011 | 1011 | Session().commit() |
|
1012 | 1012 | calculated_status = co.pull_request.calculated_review_status() |
|
1013 | 1013 | if old_calculated_status != calculated_status: |
|
1014 | 1014 | PullRequestModel()._trigger_pull_request_hook( |
|
1015 | 1015 | co.pull_request, c.rhodecode_user, 'review_status_change') |
|
1016 | 1016 | return True |
|
1017 | 1017 | else: |
|
1018 | 1018 | raise HTTPForbidden() |
@@ -1,314 +1,315 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pylons import url |
|
24 | 24 | from pylons.i18n.translation import _ |
|
25 | 25 | from webhelpers.html.builder import literal |
|
26 | 26 | from webhelpers.html.tags import link_to |
|
27 | 27 | |
|
28 | 28 | from rhodecode.lib.utils2 import AttributeDict |
|
29 | 29 | from rhodecode.lib.vcs.backends.base import BaseCommit |
|
30 | 30 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def action_parser(user_log, feed=False, parse_cs=False): |
|
37 | 37 | """ |
|
38 | 38 | This helper will map the specified string action into translated |
|
39 | 39 | fancy names with icons and links |
|
40 | 40 | |
|
41 | 41 | :param user_log: user log instance |
|
42 | 42 | :param feed: use output for feeds (no html and fancy icons) |
|
43 | 43 | :param parse_cs: parse Changesets into VCS instances |
|
44 | 44 | """ |
|
45 | 45 | ap = ActionParser(user_log, feed=feed, parse_commits=parse_cs) |
|
46 | 46 | return ap.callbacks() |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class ActionParser(object): |
|
50 | 50 | |
|
51 | 51 | commits_limit = 3 # display this amount always |
|
52 | 52 | commits_top_limit = 50 # show up to this amount of commits hidden |
|
53 | 53 | |
|
54 | 54 | def __init__(self, user_log, feed=False, parse_commits=False): |
|
55 | 55 | self.user_log = user_log |
|
56 | 56 | self.feed = feed |
|
57 | 57 | self.parse_commits = parse_commits |
|
58 | 58 | |
|
59 | 59 | self.action = user_log.action |
|
60 | 60 | self.action_params = ' ' |
|
61 | 61 | x = self.action.split(':', 1) |
|
62 | 62 | if len(x) > 1: |
|
63 | 63 | self.action, self.action_params = x |
|
64 | 64 | |
|
65 | 65 | def callbacks(self): |
|
66 | 66 | action_str = self.action_map.get(self.action, self.action) |
|
67 | 67 | if self.feed: |
|
68 | 68 | action = action_str[0].replace('[', '').replace(']', '') |
|
69 | 69 | else: |
|
70 | 70 | action = action_str[0]\ |
|
71 | 71 | .replace('[', '<span class="journal_highlight">')\ |
|
72 | 72 | .replace(']', '</span>') |
|
73 | 73 | |
|
74 | 74 | action_params_func = _no_params_func |
|
75 | 75 | if callable(action_str[1]): |
|
76 | 76 | action_params_func = action_str[1] |
|
77 | 77 | |
|
78 | 78 | # returned callbacks we need to call to get |
|
79 | 79 | return [ |
|
80 | 80 | lambda: literal(action), action_params_func, |
|
81 | 81 | self.action_parser_icon] |
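
The three callbacks returned here are meant to be invoked lazily by whatever renders the journal entry. As a hedged sketch (the rendering code itself is not part of this file, and the variable names are illustrative):

    # action_parser() hands back three callables: the translated action label,
    # the action-parameters renderer, and the icon renderer (see callbacks() above).
    action_label, action_params, action_icon = action_parser(user_log)

    # A journal entry is then assembled by calling each one, roughly:
    entry_html = u'%s %s %s' % (action_icon(), action_label(), action_params())
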
|
82 | 82 | |
|
83 | 83 | @property |
|
84 | 84 | def action_map(self): |
|
85 | 85 | |
|
86 | 86 | # action : translated str, callback(extractor), icon |
|
87 | 87 | action_map = { |
|
88 | 88 | 'user_deleted_repo': ( |
|
89 | 89 | _('[deleted] repository'), |
|
90 | 90 | None, 'icon-trash'), |
|
91 | 91 | 'user_created_repo': ( |
|
92 | 92 | _('[created] repository'), |
|
93 | 93 | None, 'icon-plus icon-plus-colored'), |
|
94 | 94 | 'user_created_fork': ( |
|
95 | 95 | _('[created] repository as fork'), |
|
96 | 96 | None, 'icon-code-fork'), |
|
97 | 97 | 'user_forked_repo': ( |
|
98 | 98 | _('[forked] repository'), |
|
99 | 99 | self.get_fork_name, 'icon-code-fork'), |
|
100 | 100 | 'user_updated_repo': ( |
|
101 | 101 | _('[updated] repository'), |
|
102 | 102 | None, 'icon-pencil icon-pencil-colored'), |
|
103 | 103 | 'user_downloaded_archive': ( |
|
104 | 104 | _('[downloaded] archive from repository'), |
|
105 | 105 | self.get_archive_name, 'icon-download-alt'), |
|
106 | 106 | 'admin_deleted_repo': ( |
|
107 | 107 | _('[delete] repository'), |
|
108 | 108 | None, 'icon-trash'), |
|
109 | 109 | 'admin_created_repo': ( |
|
110 | 110 | _('[created] repository'), |
|
111 | 111 | None, 'icon-plus icon-plus-colored'), |
|
112 | 112 | 'admin_forked_repo': ( |
|
113 | 113 | _('[forked] repository'), |
|
114 | 114 | None, 'icon-code-fork icon-fork-colored'), |
|
115 | 115 | 'admin_updated_repo': ( |
|
116 | 116 | _('[updated] repository'), |
|
117 | 117 | None, 'icon-pencil icon-pencil-colored'), |
|
118 | 118 | 'admin_created_user': ( |
|
119 | 119 | _('[created] user'), |
|
120 | 120 | self.get_user_name, 'icon-user icon-user-colored'), |
|
121 | 121 | 'admin_updated_user': ( |
|
122 | 122 | _('[updated] user'), |
|
123 | 123 | self.get_user_name, 'icon-user icon-user-colored'), |
|
124 | 124 | 'admin_created_users_group': ( |
|
125 | 125 | _('[created] user group'), |
|
126 | 126 | self.get_users_group, 'icon-pencil icon-pencil-colored'), |
|
127 | 127 | 'admin_updated_users_group': ( |
|
128 | 128 | _('[updated] user group'), |
|
129 | 129 | self.get_users_group, 'icon-pencil icon-pencil-colored'), |
|
130 | 130 | 'user_commented_revision': ( |
|
131 | 131 | _('[commented] on commit in repository'), |
|
132 | 132 | self.get_cs_links, 'icon-comment icon-comment-colored'), |
|
133 | 133 | 'user_commented_pull_request': ( |
|
134 | 134 | _('[commented] on pull request for'), |
|
135 | 135 | self.get_pull_request, 'icon-comment icon-comment-colored'), |
|
136 | 136 | 'user_closed_pull_request': ( |
|
137 | 137 | _('[closed] pull request for'), |
|
138 | 138 | self.get_pull_request, 'icon-check'), |
|
139 | 139 | 'user_merged_pull_request': ( |
|
140 | 140 | _('[merged] pull request for'), |
|
141 | 141 | self.get_pull_request, 'icon-check'), |
|
142 | 142 | 'push': ( |
|
143 | 143 | _('[pushed] into'), |
|
144 | 144 | self.get_cs_links, 'icon-arrow-up'), |
|
145 | 145 | 'push_local': ( |
|
146 | 146 | _('[committed via RhodeCode] into repository'), |
|
147 | 147 | self.get_cs_links, 'icon-pencil icon-pencil-colored'), |
|
148 | 148 | 'push_remote': ( |
|
149 | 149 | _('[pulled from remote] into repository'), |
|
150 | 150 | self.get_cs_links, 'icon-arrow-up'), |
|
151 | 151 | 'pull': ( |
|
152 | 152 | _('[pulled] from'), |
|
153 | 153 | None, 'icon-arrow-down'), |
|
154 | 154 | 'started_following_repo': ( |
|
155 | 155 | _('[started following] repository'), |
|
156 | 156 | None, 'icon-heart icon-heart-colored'), |
|
157 | 157 | 'stopped_following_repo': ( |
|
158 | 158 | _('[stopped following] repository'), |
|
159 | 159 | None, 'icon-heart-empty icon-heart-colored'), |
|
160 | 160 | } |
|
161 | 161 | return action_map |
|
162 | 162 | |
|
163 | 163 | def get_fork_name(self): |
|
164 | from rhodecode.lib import helpers as h | |
|
164 | 165 | repo_name = self.action_params |
|
165 | _url = |
|
|
166 | _url = h.route_path('repo_summary', repo_name=repo_name) | |
|
166 | 167 | return _('fork name %s') % link_to(self.action_params, _url) |
|
167 | 168 | |
|
168 | 169 | def get_user_name(self): |
|
169 | 170 | user_name = self.action_params |
|
170 | 171 | return user_name |
|
171 | 172 | |
|
172 | 173 | def get_users_group(self): |
|
173 | 174 | group_name = self.action_params |
|
174 | 175 | return group_name |
|
175 | 176 | |
|
176 | 177 | def get_pull_request(self): |
|
177 | 178 | pull_request_id = self.action_params |
|
178 | 179 | if self.is_deleted(): |
|
179 | 180 | repo_name = self.user_log.repository_name |
|
180 | 181 | else: |
|
181 | 182 | repo_name = self.user_log.repository.repo_name |
|
182 | 183 | return link_to( |
|
183 | 184 | _('Pull request #%s') % pull_request_id, |
|
184 | 185 | url('pullrequest_show', repo_name=repo_name, |
|
185 | 186 | pull_request_id=pull_request_id)) |
|
186 | 187 | |
|
187 | 188 | def get_archive_name(self): |
|
188 | 189 | archive_name = self.action_params |
|
189 | 190 | return archive_name |
|
190 | 191 | |
|
191 | 192 | def action_parser_icon(self): |
|
192 | 193 | tmpl = """<i class="%s" alt="%s"></i>""" |
|
193 | 194 | ico = self.action_map.get(self.action, ['', '', ''])[2] |
|
194 | 195 | return literal(tmpl % (ico, self.action)) |
|
195 | 196 | |
|
196 | 197 | def get_cs_links(self): |
|
197 | 198 | if self.is_deleted(): |
|
198 | 199 | return self.action_params |
|
199 | 200 | |
|
200 | 201 | repo_name = self.user_log.repository.repo_name |
|
201 | 202 | commit_ids = self.action_params.split(',') |
|
202 | 203 | commits = self.get_commits(commit_ids) |
|
203 | 204 | |
|
204 | 205 | link_generator = ( |
|
205 | 206 | self.lnk(commit, repo_name) |
|
206 | 207 | for commit in commits[:self.commits_limit]) |
|
207 | 208 | commit_links = [" " + ', '.join(link_generator)] |
|
208 | 209 | _op1, _name1 = _get_op(commit_ids[0]) |
|
209 | 210 | _op2, _name2 = _get_op(commit_ids[-1]) |
|
210 | 211 | |
|
211 | 212 | commit_id_range = '%s...%s' % (_name1, _name2) |
|
212 | 213 | |
|
213 | 214 | compare_view = ( |
|
214 | 215 | ' <div class="compare_view tooltip" title="%s">' |
|
215 | 216 | '<a href="%s">%s</a> </div>' % ( |
|
216 | 217 | _('Show all combined commits %s->%s') % ( |
|
217 | 218 | commit_ids[0][:12], commit_ids[-1][:12] |
|
218 | 219 | ), |
|
219 | 220 | url('changeset_home', repo_name=repo_name, |
|
220 | 221 | revision=commit_id_range), _('compare view') |
|
221 | 222 | ) |
|
222 | 223 | ) |
|
223 | 224 | |
|
224 | 225 | if len(commit_ids) > self.commits_limit: |
|
225 | 226 | more_count = len(commit_ids) - self.commits_limit |
|
226 | 227 | commit_links.append( |
|
227 | 228 | _(' and %(num)s more commits') % {'num': more_count} |
|
228 | 229 | ) |
|
229 | 230 | |
|
230 | 231 | if len(commits) > 1: |
|
231 | 232 | commit_links.append(compare_view) |
|
232 | 233 | return ''.join(commit_links) |
|
233 | 234 | |
|
234 | 235 | def get_commits(self, commit_ids): |
|
235 | 236 | commits = [] |
|
236 | 237 | if not filter(lambda v: v != '', commit_ids): |
|
237 | 238 | return commits |
|
238 | 239 | |
|
239 | 240 | repo = None |
|
240 | 241 | if self.parse_commits: |
|
241 | 242 | repo = self.user_log.repository.scm_instance() |
|
242 | 243 | |
|
243 | 244 | for commit_id in commit_ids[:self.commits_top_limit]: |
|
244 | 245 | _op, _name = _get_op(commit_id) |
|
245 | 246 | |
|
246 | 247 | # we want parsed commits, or new log store format is bad |
|
247 | 248 | if self.parse_commits: |
|
248 | 249 | try: |
|
249 | 250 | commit = repo.get_commit(commit_id=commit_id) |
|
250 | 251 | commits.append(commit) |
|
251 | 252 | except CommitDoesNotExistError: |
|
252 | 253 | log.error( |
|
253 | 254 | 'cannot find commit id %s in this repository', |
|
254 | 255 | commit_id) |
|
255 | 256 | commits.append(commit_id) |
|
256 | 257 | continue |
|
257 | 258 | else: |
|
258 | 259 | fake_commit = AttributeDict({ |
|
259 | 260 | 'short_id': commit_id[:12], |
|
260 | 261 | 'raw_id': commit_id, |
|
261 | 262 | 'message': '', |
|
262 | 263 | 'op': _op, |
|
263 | 264 | 'ref_name': _name |
|
264 | 265 | }) |
|
265 | 266 | commits.append(fake_commit) |
|
266 | 267 | |
|
267 | 268 | return commits |
|
268 | 269 | |
|
269 | 270 | def lnk(self, commit_or_id, repo_name): |
|
270 | 271 | from rhodecode.lib.helpers import tooltip |
|
271 | 272 | |
|
272 | 273 | if isinstance(commit_or_id, (BaseCommit, AttributeDict)): |
|
273 | 274 | lazy_cs = True |
|
274 | 275 | if (getattr(commit_or_id, 'op', None) and |
|
275 | 276 | getattr(commit_or_id, 'ref_name', None)): |
|
276 | 277 | lazy_cs = False |
|
277 | 278 | lbl = '?' |
|
278 | 279 | if commit_or_id.op == 'delete_branch': |
|
279 | 280 | lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name |
|
280 | 281 | title = '' |
|
281 | 282 | elif commit_or_id.op == 'tag': |
|
282 | 283 | lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name |
|
283 | 284 | title = '' |
|
284 | 285 | _url = '#' |
|
285 | 286 | |
|
286 | 287 | else: |
|
287 | 288 | lbl = '%s' % (commit_or_id.short_id[:8]) |
|
288 | 289 | _url = url('changeset_home', repo_name=repo_name, |
|
289 | 290 | revision=commit_or_id.raw_id) |
|
290 | 291 | title = tooltip(commit_or_id.message) |
|
291 | 292 | else: |
|
292 | 293 | # commit cannot be found/stripped/removed etc. |
|
293 | 294 | lbl = ('%s' % commit_or_id)[:12] |
|
294 | 295 | _url = '#' |
|
295 | 296 | title = _('Commit not found') |
|
296 | 297 | if self.parse_commits: |
|
297 | 298 | return link_to(lbl, _url, title=title, class_='tooltip') |
|
298 | 299 | return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name, |
|
299 | 300 | class_='lazy-cs' if lazy_cs else '') |
|
300 | 301 | |
|
301 | 302 | def is_deleted(self): |
|
302 | 303 | return self.user_log.repository is None |
|
303 | 304 | |
|
304 | 305 | |
|
305 | 306 | def _no_params_func(): |
|
306 | 307 | return "" |
|
307 | 308 | |
|
308 | 309 | |
|
309 | 310 | def _get_op(commit_id): |
|
310 | 311 | _op = None |
|
311 | 312 | _name = commit_id |
|
312 | 313 | if len(commit_id.split('=>')) == 2: |
|
313 | 314 | _op, _name = commit_id.split('=>') |
|
314 | 315 | return _op, _name |
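
A quick illustration of the `op=>name` encoding that `_get_op` unpacks (the ids below are made up):

    # plain commit ids pass through with no operation attached
    assert _get_op('deadbeefcafe') == (None, 'deadbeefcafe')
    # 'op=>name' entries are split into the operation and the ref name
    assert _get_op('tag=>v1.2.0') == ('tag', 'v1.2.0')
    assert _get_op('delete_branch=>feature-x') == ('delete_branch', 'feature-x')
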
@@ -1,2007 +1,2007 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | authentication and permission libraries |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import inspect |
|
27 | 27 | import collections |
|
28 | 28 | import fnmatch |
|
29 | 29 | import hashlib |
|
30 | 30 | import itertools |
|
31 | 31 | import logging |
|
32 | 32 | import random |
|
33 | 33 | import traceback |
|
34 | 34 | from functools import wraps |
|
35 | 35 | |
|
36 | 36 | import ipaddress |
|
37 | 37 | from pyramid.httpexceptions import HTTPForbidden, HTTPFound |
|
38 | 38 | from pylons import url, request |
|
39 | 39 | from pylons.controllers.util import abort, redirect |
|
40 | 40 | from pylons.i18n.translation import _ |
|
41 | 41 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
42 | 42 | from sqlalchemy.orm import joinedload |
|
43 | 43 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
44 | 44 | |
|
45 | 45 | import rhodecode |
|
46 | 46 | from rhodecode.model import meta |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.user import UserModel |
|
49 | 49 | from rhodecode.model.db import ( |
|
50 | 50 | User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember, |
|
51 | 51 | UserIpMap, UserApiKeys, RepoGroup) |
|
52 | 52 | from rhodecode.lib import caches |
|
53 | 53 | from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5 |
|
54 | 54 | from rhodecode.lib.utils import ( |
|
55 | 55 | get_repo_slug, get_repo_group_slug, get_user_group_slug) |
|
56 | 56 | from rhodecode.lib.caching_query import FromCache |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | if rhodecode.is_unix: |
|
60 | 60 | import bcrypt |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | csrf_token_key = "csrf_token" |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class PasswordGenerator(object): |
|
68 | 68 | """ |
|
69 | 69 | This is a simple class for generating passwords from different sets of |
|
70 | 70 | characters |
|
71 | 71 | usage:: |
|
72 | 72 | |
|
73 | 73 | passwd_gen = PasswordGenerator() |
|
74 | 74 | #print 8-letter password containing only big and small letters |
|
75 | 75 | of alphabet |
|
76 | 76 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
77 | 77 | """ |
|
78 | 78 | ALPHABETS_NUM = r'''1234567890''' |
|
79 | 79 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
80 | 80 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
81 | 81 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
82 | 82 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
83 | 83 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
84 | 84 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
85 | 85 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
86 | 86 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
87 | 87 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
88 | 88 | |
|
89 | 89 | def __init__(self, passwd=''): |
|
90 | 90 | self.passwd = passwd |
|
91 | 91 | |
|
92 | 92 | def gen_password(self, length, type_=None): |
|
93 | 93 | if type_ is None: |
|
94 | 94 | type_ = self.ALPHABETS_FULL |
|
95 | 95 | self.passwd = ''.join([random.choice(type_) for _ in xrange(length)]) |
|
96 | 96 | return self.passwd |
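
A runnable version of the usage hinted at in the class docstring above; the import path is an assumption based on where this module appears to live:

    from rhodecode.lib.auth import PasswordGenerator   # assumed module path

    passwd_gen = PasswordGenerator()
    # 8 characters, upper- and lower-case letters only (the docstring's example)
    print(passwd_gen.gen_password(8, PasswordGenerator.ALPHABETS_BIG_SMALL))
    # 12 characters drawn from letters and digits
    print(passwd_gen.gen_password(12, PasswordGenerator.ALPHABETS_ALPHANUM))
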
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | class _RhodeCodeCryptoBase(object): |
|
100 | 100 | ENC_PREF = None |
|
101 | 101 | |
|
102 | 102 | def hash_create(self, str_): |
|
103 | 103 | """ |
|
104 | 104 | hash the given string |
|
105 | 105 | |
|
106 | 106 | :param str_: password to hash |
|
107 | 107 | """ |
|
108 | 108 | raise NotImplementedError |
|
109 | 109 | |
|
110 | 110 | def hash_check_with_upgrade(self, password, hashed): |
|
111 | 111 | """ |
|
112 | 112 | Returns a tuple in which the first element is a boolean that states whether the |

113 | 113 | given password matches its hashed version, and the second is a new hash |

114 | 114 | of the password, in case this password should be migrated to a new |

115 | 115 | cipher. |
|
116 | 116 | """ |
|
117 | 117 | checked_hash = self.hash_check(password, hashed) |
|
118 | 118 | return checked_hash, None |
|
119 | 119 | |
|
120 | 120 | def hash_check(self, password, hashed): |
|
121 | 121 | """ |
|
122 | 122 | Checks the given password against its hashed value. |
|
123 | 123 | |
|
124 | 124 | :param password: password |
|
125 | 125 | :param hashed: password in hashed form |
|
126 | 126 | """ |
|
127 | 127 | raise NotImplementedError |
|
128 | 128 | |
|
129 | 129 | def _assert_bytes(self, value): |
|
130 | 130 | """ |
|
131 | 131 | Passing in a `unicode` object can lead to hard-to-detect issues |
|
132 | 132 | if passwords contain non-ascii characters. Doing a type check |
|
133 | 133 | during runtime, so that such mistakes are detected early on. |
|
134 | 134 | """ |
|
135 | 135 | if not isinstance(value, str): |
|
136 | 136 | raise TypeError( |
|
137 | 137 | "Bytestring required as input, got %r." % (value, )) |
|
138 | 138 | |
|
139 | 139 | |
|
140 | 140 | class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase): |
|
141 | 141 | ENC_PREF = ('$2a$10', '$2b$10') |
|
142 | 142 | |
|
143 | 143 | def hash_create(self, str_): |
|
144 | 144 | self._assert_bytes(str_) |
|
145 | 145 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
146 | 146 | |
|
147 | 147 | def hash_check_with_upgrade(self, password, hashed): |
|
148 | 148 | """ |
|
149 | 149 | Returns a tuple whose first element is a boolean stating whether the

150 | 150 | given password matches its hashed version, and whose second element is

151 | 151 | a new hash of the password, in case this password should be migrated

152 | 152 | to a new cipher.

153 | 153 |

154 | 154 | This implements special upgrade logic which works as follows:

155 | 155 | - check if the given password matches the bcrypt hash; if yes, the

156 | 156 | correct password was used and it is already stored as bcrypt, so

157 | 157 | proceed without any changes

158 | 158 | - if the bcrypt hash check fails, try sha256. If that compare succeeds,

159 | 159 | a correct but old-style hashed password was used; indicate a

160 | 160 | hash change and proceed
|
161 | 161 | """ |
|
162 | 162 | |
|
163 | 163 | new_hash = None |
|
164 | 164 | |
|
165 | 165 | # regular pw check |
|
166 | 166 | password_match_bcrypt = self.hash_check(password, hashed) |
|
167 | 167 | |
|
168 | 168 | # now we want to know if the password was maybe from sha256 |
|
169 | 169 | # basically calling _RhodeCodeCryptoSha256().hash_check() |
|
170 | 170 | if not password_match_bcrypt: |
|
171 | 171 | if _RhodeCodeCryptoSha256().hash_check(password, hashed): |
|
172 | 172 | new_hash = self.hash_create(password) # make new bcrypt hash |
|
173 | 173 | password_match_bcrypt = True |
|
174 | 174 | |
|
175 | 175 | return password_match_bcrypt, new_hash |
|
176 | 176 | |
|
177 | 177 | def hash_check(self, password, hashed): |
|
178 | 178 | """ |
|
179 | 179 | Checks whether the password matches its hashed value.
|
180 | 180 | |
|
181 | 181 | :param password: password |
|
182 | 182 | :param hashed: password in hashed form |
|
183 | 183 | """ |
|
184 | 184 | self._assert_bytes(password) |
|
185 | 185 | try: |
|
186 | 186 | return bcrypt.hashpw(password, hashed) == hashed |
|
187 | 187 | except ValueError as e: |
|
188 | 188 | # we probably have an invalid salt here; we should not crash,

189 | 189 | # just return False as it would be a wrong password.
|
190 | 190 | log.debug('Failed to check password hash using bcrypt %s', |
|
191 | 191 | safe_str(e)) |
|
192 | 192 | |
|
193 | 193 | return False |
|
194 | 194 | |
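The upgrade path described in the docstring above can be exercised roughly as follows; this is only a sketch, and it relies on the sha256 backend defined just below (passwords are plain bytestrings here, as `_assert_bytes` requires):

    bcrypt_backend = _RhodeCodeCryptoBCrypt()
    sha256_backend = _RhodeCodeCryptoSha256()

    # a legacy account still stores a sha256 hash of the password
    old_hash = sha256_backend.hash_create('s3cret')

    matched, new_hash = bcrypt_backend.hash_check_with_upgrade('s3cret', old_hash)
    # matched is True; new_hash is a fresh bcrypt hash the caller should
    # persist so the next login is checked against bcrypt directly
    assert matched and new_hash.startswith('$2')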
|
195 | 195 | |
|
196 | 196 | class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase): |
|
197 | 197 | ENC_PREF = '_' |
|
198 | 198 | |
|
199 | 199 | def hash_create(self, str_): |
|
200 | 200 | self._assert_bytes(str_) |
|
201 | 201 | return hashlib.sha256(str_).hexdigest() |
|
202 | 202 | |
|
203 | 203 | def hash_check(self, password, hashed): |
|
204 | 204 | """ |
|
205 | 205 | Checks whether the password matches its hashed value.
|
206 | 206 | |
|
207 | 207 | :param password: password |
|
208 | 208 | :param hashed: password in hashed form |
|
209 | 209 | """ |
|
210 | 210 | self._assert_bytes(password) |
|
211 | 211 | return hashlib.sha256(password).hexdigest() == hashed |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase): |
|
215 | 215 | ENC_PREF = '_' |
|
216 | 216 | |
|
217 | 217 | def hash_create(self, str_): |
|
218 | 218 | self._assert_bytes(str_) |
|
219 | 219 | return hashlib.md5(str_).hexdigest() |
|
220 | 220 | |
|
221 | 221 | def hash_check(self, password, hashed): |
|
222 | 222 | """ |
|
223 | 223 | Checks whether the password matches its hashed value.
|
224 | 224 | |
|
225 | 225 | :param password: password |
|
226 | 226 | :param hashed: password in hashed form |
|
227 | 227 | """ |
|
228 | 228 | self._assert_bytes(password) |
|
229 | 229 | return hashlib.md5(password).hexdigest() == hashed |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | def crypto_backend(): |
|
233 | 233 | """ |
|
234 | 234 | Return the matching crypto backend. |
|
235 | 235 | |
|
236 | 236 | Selection is based on whether we are running tests; if so, we pick the md5

237 | 237 | backend so tests run faster, since bcrypt is expensive to calculate
|
238 | 238 | """ |
|
239 | 239 | if rhodecode.is_test: |
|
240 | 240 | RhodeCodeCrypto = _RhodeCodeCryptoMd5() |
|
241 | 241 | else: |
|
242 | 242 | RhodeCodeCrypto = _RhodeCodeCryptoBCrypt() |
|
243 | 243 | |
|
244 | 244 | return RhodeCodeCrypto |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | def get_crypt_password(password): |
|
248 | 248 | """ |
|
249 | 249 | Create the hash of `password` with the active crypto backend. |
|
250 | 250 | |
|
251 | 251 | :param password: The cleartext password. |
|
252 | 252 | :type password: unicode |
|
253 | 253 | """ |
|
254 | 254 | password = safe_str(password) |
|
255 | 255 | return crypto_backend().hash_create(password) |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def check_password(password, hashed): |
|
259 | 259 | """ |
|
260 | 260 | Check if the value in `password` matches the hash in `hashed`. |
|
261 | 261 | |
|
262 | 262 | :param password: The cleartext password. |
|
263 | 263 | :type password: unicode |
|
264 | 264 | |
|
265 | 265 | :param hashed: The expected hashed version of the password. |
|
266 | 266 | :type hashed: str; the hash has to be passed in text representation.
|
267 | 267 | """ |
|
268 | 268 | password = safe_str(password) |
|
269 | 269 | return crypto_backend().hash_check(password, hashed) |
|
270 | 270 | |
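Putting the two helpers together, a typical round trip looks like this (the test setup swaps in the faster md5 backend, but the calls are identical):

    hashed = get_crypt_password(u'my-password')   # safe_str() + active backend
    assert check_password(u'my-password', hashed)
    assert not check_password(u'wrong-password', hashed)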
|
271 | 271 | |
|
272 | 272 | def generate_auth_token(data, salt=None): |
|
273 | 273 | """ |
|
274 | 274 | Generates an auth token (API key) from the given string
|
275 | 275 | """ |
|
276 | 276 | |
|
277 | 277 | if salt is None: |
|
278 | 278 | salt = os.urandom(16) |
|
279 | 279 | return hashlib.sha1(safe_str(data) + salt).hexdigest() |
|
280 | 280 | |
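A short sketch of the helper above; when no salt is given a random one is generated, so two calls with the same input produce different tokens (the e-mail-like input is just an example value):

    token_a = generate_auth_token('user@example.com')
    token_b = generate_auth_token('user@example.com')
    assert token_a != token_b          # random 16-byte salt on each call
    assert len(token_a) == 40          # sha1 hexdigest

    # a fixed salt makes the token reproducible
    fixed = generate_auth_token('user@example.com', salt='my-salt')
    assert fixed == generate_auth_token('user@example.com', salt='my-salt')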
|
281 | 281 | |
|
282 | 282 | class CookieStoreWrapper(object): |
|
283 | 283 | |
|
284 | 284 | def __init__(self, cookie_store): |
|
285 | 285 | self.cookie_store = cookie_store |
|
286 | 286 | |
|
287 | 287 | def __repr__(self): |
|
288 | 288 | return 'CookieStore<%s>' % (self.cookie_store) |
|
289 | 289 | |
|
290 | 290 | def get(self, key, other=None): |
|
291 | 291 | if isinstance(self.cookie_store, dict): |
|
292 | 292 | return self.cookie_store.get(key, other) |
|
293 | 293 | elif isinstance(self.cookie_store, AuthUser): |
|
294 | 294 | return self.cookie_store.__dict__.get(key, other) |
|
295 | 295 | |
|
296 | 296 | |
|
297 | 297 | def _cached_perms_data(user_id, scope, user_is_admin, |
|
298 | 298 | user_inherit_default_permissions, explicit, algo): |
|
299 | 299 | |
|
300 | 300 | permissions = PermissionCalculator( |
|
301 | 301 | user_id, scope, user_is_admin, user_inherit_default_permissions, |
|
302 | 302 | explicit, algo) |
|
303 | 303 | return permissions.calculate() |
|
304 | 304 | |
|
305 | 305 | class PermOrigin: |
|
306 | 306 | ADMIN = 'superadmin' |
|
307 | 307 | |
|
308 | 308 | REPO_USER = 'user:%s' |
|
309 | 309 | REPO_USERGROUP = 'usergroup:%s' |
|
310 | 310 | REPO_OWNER = 'repo.owner' |
|
311 | 311 | REPO_DEFAULT = 'repo.default' |
|
312 | 312 | REPO_PRIVATE = 'repo.private' |
|
313 | 313 | |
|
314 | 314 | REPOGROUP_USER = 'user:%s' |
|
315 | 315 | REPOGROUP_USERGROUP = 'usergroup:%s' |
|
316 | 316 | REPOGROUP_OWNER = 'group.owner' |
|
317 | 317 | REPOGROUP_DEFAULT = 'group.default' |
|
318 | 318 | |
|
319 | 319 | USERGROUP_USER = 'user:%s' |
|
320 | 320 | USERGROUP_USERGROUP = 'usergroup:%s' |
|
321 | 321 | USERGROUP_OWNER = 'usergroup.owner' |
|
322 | 322 | USERGROUP_DEFAULT = 'usergroup.default' |
|
323 | 323 | |
|
324 | 324 | |
|
325 | 325 | class PermOriginDict(dict): |
|
326 | 326 | """ |
|
327 | 327 | A special dict used for tracking permissions along with their origins. |
|
328 | 328 | |
|
329 | 329 | `__setitem__` has been overridden to expect a tuple(perm, origin) |
|
330 | 330 | `__getitem__` will return only the perm |
|
331 | 331 | `.perm_origin_stack` will return the stack of (perm, origin) set per key |
|
332 | 332 | |
|
333 | 333 | >>> perms = PermOriginDict() |
|
334 | 334 | >>> perms['resource'] = 'read', 'default' |
|
335 | 335 | >>> perms['resource'] |
|
336 | 336 | 'read' |
|
337 | 337 | >>> perms['resource'] = 'write', 'admin' |
|
338 | 338 | >>> perms['resource'] |
|
339 | 339 | 'write' |
|
340 | 340 | >>> perms.perm_origin_stack |
|
341 | 341 | {'resource': [('read', 'default'), ('write', 'admin')]} |
|
342 | 342 | """ |
|
343 | 343 | |
|
344 | 344 | |
|
345 | 345 | def __init__(self, *args, **kw): |
|
346 | 346 | dict.__init__(self, *args, **kw) |
|
347 | 347 | self.perm_origin_stack = {} |
|
348 | 348 | |
|
349 | 349 | def __setitem__(self, key, (perm, origin)): |
|
350 | 350 | self.perm_origin_stack.setdefault(key, []).append((perm, origin)) |
|
351 | 351 | dict.__setitem__(self, key, perm) |
|
352 | 352 | |
|
353 | 353 | |
|
354 | 354 | class PermissionCalculator(object): |
|
355 | 355 | |
|
356 | 356 | def __init__( |
|
357 | 357 | self, user_id, scope, user_is_admin, |
|
358 | 358 | user_inherit_default_permissions, explicit, algo): |
|
359 | 359 | self.user_id = user_id |
|
360 | 360 | self.user_is_admin = user_is_admin |
|
361 | 361 | self.inherit_default_permissions = user_inherit_default_permissions |
|
362 | 362 | self.explicit = explicit |
|
363 | 363 | self.algo = algo |
|
364 | 364 | |
|
365 | 365 | scope = scope or {} |
|
366 | 366 | self.scope_repo_id = scope.get('repo_id') |
|
367 | 367 | self.scope_repo_group_id = scope.get('repo_group_id') |
|
368 | 368 | self.scope_user_group_id = scope.get('user_group_id') |
|
369 | 369 | |
|
370 | 370 | self.default_user_id = User.get_default_user(cache=True).user_id |
|
371 | 371 | |
|
372 | 372 | self.permissions_repositories = PermOriginDict() |
|
373 | 373 | self.permissions_repository_groups = PermOriginDict() |
|
374 | 374 | self.permissions_user_groups = PermOriginDict() |
|
375 | 375 | self.permissions_global = set() |
|
376 | 376 | |
|
377 | 377 | self.default_repo_perms = Permission.get_default_repo_perms( |
|
378 | 378 | self.default_user_id, self.scope_repo_id) |
|
379 | 379 | self.default_repo_groups_perms = Permission.get_default_group_perms( |
|
380 | 380 | self.default_user_id, self.scope_repo_group_id) |
|
381 | 381 | self.default_user_group_perms = \ |
|
382 | 382 | Permission.get_default_user_group_perms( |
|
383 | 383 | self.default_user_id, self.scope_user_group_id) |
|
384 | 384 | |
|
385 | 385 | def calculate(self): |
|
386 | 386 | if self.user_is_admin: |
|
387 | 387 | return self._admin_permissions() |
|
388 | 388 | |
|
389 | 389 | self._calculate_global_default_permissions() |
|
390 | 390 | self._calculate_global_permissions() |
|
391 | 391 | self._calculate_default_permissions() |
|
392 | 392 | self._calculate_repository_permissions() |
|
393 | 393 | self._calculate_repository_group_permissions() |
|
394 | 394 | self._calculate_user_group_permissions() |
|
395 | 395 | return self._permission_structure() |
|
396 | 396 | |
|
397 | 397 | def _admin_permissions(self): |
|
398 | 398 | """ |
|
399 | 399 | an admin user has all default rights for repositories

400 | 400 | and groups set to admin
|
401 | 401 | """ |
|
402 | 402 | self.permissions_global.add('hg.admin') |
|
403 | 403 | self.permissions_global.add('hg.create.write_on_repogroup.true') |
|
404 | 404 | |
|
405 | 405 | # repositories |
|
406 | 406 | for perm in self.default_repo_perms: |
|
407 | 407 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
408 | 408 | p = 'repository.admin' |
|
409 | 409 | self.permissions_repositories[r_k] = p, PermOrigin.ADMIN |
|
410 | 410 | |
|
411 | 411 | # repository groups |
|
412 | 412 | for perm in self.default_repo_groups_perms: |
|
413 | 413 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
414 | 414 | p = 'group.admin' |
|
415 | 415 | self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN |
|
416 | 416 | |
|
417 | 417 | # user groups |
|
418 | 418 | for perm in self.default_user_group_perms: |
|
419 | 419 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
420 | 420 | p = 'usergroup.admin' |
|
421 | 421 | self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN |
|
422 | 422 | |
|
423 | 423 | return self._permission_structure() |
|
424 | 424 | |
|
425 | 425 | def _calculate_global_default_permissions(self): |
|
426 | 426 | """ |
|
427 | 427 | global permissions taken from the default user |
|
428 | 428 | """ |
|
429 | 429 | default_global_perms = UserToPerm.query()\ |
|
430 | 430 | .filter(UserToPerm.user_id == self.default_user_id)\ |
|
431 | 431 | .options(joinedload(UserToPerm.permission)) |
|
432 | 432 | |
|
433 | 433 | for perm in default_global_perms: |
|
434 | 434 | self.permissions_global.add(perm.permission.permission_name) |
|
435 | 435 | |
|
436 | 436 | def _calculate_global_permissions(self): |
|
437 | 437 | """ |
|
438 | 438 | Set global system permissions with user permissions or permissions |
|
439 | 439 | taken from the user groups of the current user. |
|
440 | 440 | |
|
441 | 441 | The permissions include repo creating, repo group creating, forking |
|
442 | 442 | etc. |
|
443 | 443 | """ |
|
444 | 444 | |
|
445 | 445 | # now we read the defined permissions and overwrite what we have set

446 | 446 | # before; those can be configured explicitly on groups or users.
|
447 | 447 | |
|
448 | 448 | # TODO: johbo: This seems to be out of sync, find out the reason |
|
449 | 449 | # for the comment below and update it. |
|
450 | 450 | |
|
451 | 451 | # In case we want to extend this list we should be always in sync with |
|
452 | 452 | # User.DEFAULT_USER_PERMISSIONS definitions |
|
453 | 453 | _configurable = frozenset([ |
|
454 | 454 | 'hg.fork.none', 'hg.fork.repository', |
|
455 | 455 | 'hg.create.none', 'hg.create.repository', |
|
456 | 456 | 'hg.usergroup.create.false', 'hg.usergroup.create.true', |
|
457 | 457 | 'hg.repogroup.create.false', 'hg.repogroup.create.true', |
|
458 | 458 | 'hg.create.write_on_repogroup.false', |
|
459 | 459 | 'hg.create.write_on_repogroup.true', |
|
460 | 460 | 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true' |
|
461 | 461 | ]) |
|
462 | 462 | |
|
463 | 463 | # USER GROUPS come first: user group global permissions
|
464 | 464 | user_perms_from_users_groups = Session().query(UserGroupToPerm)\ |
|
465 | 465 | .options(joinedload(UserGroupToPerm.permission))\ |
|
466 | 466 | .join((UserGroupMember, UserGroupToPerm.users_group_id == |
|
467 | 467 | UserGroupMember.users_group_id))\ |
|
468 | 468 | .filter(UserGroupMember.user_id == self.user_id)\ |
|
469 | 469 | .order_by(UserGroupToPerm.users_group_id)\ |
|
470 | 470 | .all() |
|
471 | 471 | |
|
472 | 472 | # need to group here by groups since user can be in more than |
|
473 | 473 | # one group, so we get all groups |
|
474 | 474 | _explicit_grouped_perms = [ |
|
475 | 475 | [x, list(y)] for x, y in |
|
476 | 476 | itertools.groupby(user_perms_from_users_groups, |
|
477 | 477 | lambda _x: _x.users_group)] |
|
478 | 478 | |
|
479 | 479 | for gr, perms in _explicit_grouped_perms: |
|
480 | 480 | # since user can be in multiple groups iterate over them and |
|
481 | 481 | # select the lowest permissions first (more explicit) |
|
482 | 482 | # TODO: marcink: do this^^ |
|
483 | 483 | |
|
484 | 484 | # group doesn't inherit default permissions so we actually set them |
|
485 | 485 | if not gr.inherit_default_permissions: |
|
486 | 486 | # NEED TO IGNORE all previously set configurable permissions |
|
487 | 487 | # and replace them with explicitly set from this user |
|
488 | 488 | # group permissions |
|
489 | 489 | self.permissions_global = self.permissions_global.difference( |
|
490 | 490 | _configurable) |
|
491 | 491 | for perm in perms: |
|
492 | 492 | self.permissions_global.add(perm.permission.permission_name) |
|
493 | 493 | |
|
494 | 494 | # user explicit global permissions |
|
495 | 495 | user_perms = Session().query(UserToPerm)\ |
|
496 | 496 | .options(joinedload(UserToPerm.permission))\ |
|
497 | 497 | .filter(UserToPerm.user_id == self.user_id).all() |
|
498 | 498 | |
|
499 | 499 | if not self.inherit_default_permissions: |
|
500 | 500 | # NEED TO IGNORE all configurable permissions and |
|
501 | 501 | # replace them with explicitly set from this user permissions |
|
502 | 502 | self.permissions_global = self.permissions_global.difference( |
|
503 | 503 | _configurable) |
|
504 | 504 | for perm in user_perms: |
|
505 | 505 | self.permissions_global.add(perm.permission.permission_name) |
|
506 | 506 | |
|
507 | 507 | def _calculate_default_permissions(self): |
|
508 | 508 | """ |
|
509 | 509 | Set default user permissions for repositories and repository groups,

510 | 510 | taken from the default user.
|
511 | 511 | |
|
512 | 512 | Calculate inheritance of object permissions based on what we have now |
|
513 | 513 | in GLOBAL permissions. We check if .false is in GLOBAL since this is |
|
514 | 514 | explicitly set. Inherit is the opposite of .false being there. |
|
515 | 515 | |
|
516 | 516 | .. note:: |
|
517 | 517 | |
|
518 | 518 | the syntax is a little bit odd, but what we need to check here is

519 | 519 | the opposite of the .false permission being in the list, so even in an

520 | 520 | inconsistent state when both .true and .false are there,

521 | 521 | .false is more important
|
522 | 522 | |
|
523 | 523 | """ |
|
524 | 524 | user_inherit_object_permissions = not ('hg.inherit_default_perms.false' |
|
525 | 525 | in self.permissions_global) |
|
526 | 526 | |
|
527 | 527 | # defaults for repositories, taken from `default` user permissions |
|
528 | 528 | # on given repo |
|
529 | 529 | for perm in self.default_repo_perms: |
|
530 | 530 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
531 | 531 | o = PermOrigin.REPO_DEFAULT |
|
532 | 532 | if perm.Repository.private and not ( |
|
533 | 533 | perm.Repository.user_id == self.user_id): |
|
534 | 534 | # disable defaults for private repos, |
|
535 | 535 | p = 'repository.none' |
|
536 | 536 | o = PermOrigin.REPO_PRIVATE |
|
537 | 537 | elif perm.Repository.user_id == self.user_id: |
|
538 | 538 | # set admin if owner |
|
539 | 539 | p = 'repository.admin' |
|
540 | 540 | o = PermOrigin.REPO_OWNER |
|
541 | 541 | else: |
|
542 | 542 | p = perm.Permission.permission_name |
|
543 | 543 | # if we decide this user isn't inheriting permissions from |
|
544 | 544 | # default user we set him to .none so only explicit |
|
545 | 545 | # permissions work |
|
546 | 546 | if not user_inherit_object_permissions: |
|
547 | 547 | p = 'repository.none' |
|
548 | 548 | self.permissions_repositories[r_k] = p, o |
|
549 | 549 | |
|
550 | 550 | # defaults for repository groups taken from `default` user permission |
|
551 | 551 | # on given group |
|
552 | 552 | for perm in self.default_repo_groups_perms: |
|
553 | 553 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
554 | 554 | o = PermOrigin.REPOGROUP_DEFAULT |
|
555 | 555 | if perm.RepoGroup.user_id == self.user_id: |
|
556 | 556 | # set admin if owner |
|
557 | 557 | p = 'group.admin' |
|
558 | 558 | o = PermOrigin.REPOGROUP_OWNER |
|
559 | 559 | else: |
|
560 | 560 | p = perm.Permission.permission_name |
|
561 | 561 | |
|
562 | 562 | # if we decide this user isn't inheriting permissions from default |
|
563 | 563 | # user we set him to .none so only explicit permissions work |
|
564 | 564 | if not user_inherit_object_permissions: |
|
565 | 565 | p = 'group.none' |
|
566 | 566 | self.permissions_repository_groups[rg_k] = p, o |
|
567 | 567 | |
|
568 | 568 | # defaults for user groups taken from `default` user permission |
|
569 | 569 | # on given user group |
|
570 | 570 | for perm in self.default_user_group_perms: |
|
571 | 571 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
572 | 572 | o = PermOrigin.USERGROUP_DEFAULT |
|
573 | 573 | if perm.UserGroup.user_id == self.user_id: |
|
574 | 574 | # set admin if owner |
|
575 | 575 | p = 'usergroup.admin' |
|
576 | 576 | o = PermOrigin.USERGROUP_OWNER |
|
577 | 577 | else: |
|
578 | 578 | p = perm.Permission.permission_name |
|
579 | 579 | |
|
580 | 580 | # if we decide this user isn't inheriting permissions from default |
|
581 | 581 | # user we set him to .none so only explicit permissions work |
|
582 | 582 | if not user_inherit_object_permissions: |
|
583 | 583 | p = 'usergroup.none' |
|
584 | 584 | self.permissions_user_groups[u_k] = p, o |
|
585 | 585 | |
|
586 | 586 | def _calculate_repository_permissions(self): |
|
587 | 587 | """ |
|
588 | 588 | Repository permissions for the current user. |
|
589 | 589 | |
|
590 | 590 | Check if the user is part of user groups for this repository and |
|
591 | 591 | fill in the permission from it. `_choose_permission` decides which

592 | 592 | permission should be selected, based on the selected method.
|
593 | 593 | """ |
|
594 | 594 | |
|
595 | 595 | # user group for repositories permissions |
|
596 | 596 | user_repo_perms_from_user_group = Permission\ |
|
597 | 597 | .get_default_repo_perms_from_user_group( |
|
598 | 598 | self.user_id, self.scope_repo_id) |
|
599 | 599 | |
|
600 | 600 | multiple_counter = collections.defaultdict(int) |
|
601 | 601 | for perm in user_repo_perms_from_user_group: |
|
602 | 602 | r_k = perm.UserGroupRepoToPerm.repository.repo_name |
|
603 | 603 | ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name |
|
604 | 604 | multiple_counter[r_k] += 1 |
|
605 | 605 | p = perm.Permission.permission_name |
|
606 | 606 | o = PermOrigin.REPO_USERGROUP % ug_k |
|
607 | 607 | |
|
608 | 608 | if perm.Repository.user_id == self.user_id: |
|
609 | 609 | # set admin if owner |
|
610 | 610 | p = 'repository.admin' |
|
611 | 611 | o = PermOrigin.REPO_OWNER |
|
612 | 612 | else: |
|
613 | 613 | if multiple_counter[r_k] > 1: |
|
614 | 614 | cur_perm = self.permissions_repositories[r_k] |
|
615 | 615 | p = self._choose_permission(p, cur_perm) |
|
616 | 616 | self.permissions_repositories[r_k] = p, o |
|
617 | 617 | |
|
618 | 618 | # user explicit permissions for repositories, overrides any specified |
|
619 | 619 | # by the group permission |
|
620 | 620 | user_repo_perms = Permission.get_default_repo_perms( |
|
621 | 621 | self.user_id, self.scope_repo_id) |
|
622 | 622 | for perm in user_repo_perms: |
|
623 | 623 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
624 | 624 | o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username |
|
625 | 625 | # set admin if owner |
|
626 | 626 | if perm.Repository.user_id == self.user_id: |
|
627 | 627 | p = 'repository.admin' |
|
628 | 628 | o = PermOrigin.REPO_OWNER |
|
629 | 629 | else: |
|
630 | 630 | p = perm.Permission.permission_name |
|
631 | 631 | if not self.explicit: |
|
632 | 632 | cur_perm = self.permissions_repositories.get( |
|
633 | 633 | r_k, 'repository.none') |
|
634 | 634 | p = self._choose_permission(p, cur_perm) |
|
635 | 635 | self.permissions_repositories[r_k] = p, o |
|
636 | 636 | |
|
637 | 637 | def _calculate_repository_group_permissions(self): |
|
638 | 638 | """ |
|
639 | 639 | Repository group permissions for the current user. |
|
640 | 640 | |
|
641 | 641 | Check if the user is part of user groups for repository groups and |
|
642 | 642 | fill in the permissions from it. `_choose_permission` decides which

643 | 643 | permission should be selected, based on the selected method.
|
644 | 644 | """ |
|
645 | 645 | # user group for repo groups permissions |
|
646 | 646 | user_repo_group_perms_from_user_group = Permission\ |
|
647 | 647 | .get_default_group_perms_from_user_group( |
|
648 | 648 | self.user_id, self.scope_repo_group_id) |
|
649 | 649 | |
|
650 | 650 | multiple_counter = collections.defaultdict(int) |
|
651 | 651 | for perm in user_repo_group_perms_from_user_group: |
|
652 | 652 | g_k = perm.UserGroupRepoGroupToPerm.group.group_name |
|
653 | 653 | ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name |
|
654 | 654 | o = PermOrigin.REPOGROUP_USERGROUP % ug_k |
|
655 | 655 | multiple_counter[g_k] += 1 |
|
656 | 656 | p = perm.Permission.permission_name |
|
657 | 657 | if perm.RepoGroup.user_id == self.user_id: |
|
658 | 658 | # set admin if owner, even for member of other user group |
|
659 | 659 | p = 'group.admin' |
|
660 | 660 | o = PermOrigin.REPOGROUP_OWNER |
|
661 | 661 | else: |
|
662 | 662 | if multiple_counter[g_k] > 1: |
|
663 | 663 | cur_perm = self.permissions_repository_groups[g_k] |
|
664 | 664 | p = self._choose_permission(p, cur_perm) |
|
665 | 665 | self.permissions_repository_groups[g_k] = p, o |
|
666 | 666 | |
|
667 | 667 | # user explicit permissions for repository groups |
|
668 | 668 | user_repo_groups_perms = Permission.get_default_group_perms( |
|
669 | 669 | self.user_id, self.scope_repo_group_id) |
|
670 | 670 | for perm in user_repo_groups_perms: |
|
671 | 671 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
672 | 672 | u_k = perm.UserRepoGroupToPerm.user.username |
|
673 | 673 | o = PermOrigin.REPOGROUP_USER % u_k |
|
674 | 674 | |
|
675 | 675 | if perm.RepoGroup.user_id == self.user_id: |
|
676 | 676 | # set admin if owner |
|
677 | 677 | p = 'group.admin' |
|
678 | 678 | o = PermOrigin.REPOGROUP_OWNER |
|
679 | 679 | else: |
|
680 | 680 | p = perm.Permission.permission_name |
|
681 | 681 | if not self.explicit: |
|
682 | 682 | cur_perm = self.permissions_repository_groups.get( |
|
683 | 683 | rg_k, 'group.none') |
|
684 | 684 | p = self._choose_permission(p, cur_perm) |
|
685 | 685 | self.permissions_repository_groups[rg_k] = p, o |
|
686 | 686 | |
|
687 | 687 | def _calculate_user_group_permissions(self): |
|
688 | 688 | """ |
|
689 | 689 | User group permissions for the current user. |
|
690 | 690 | """ |
|
691 | 691 | # user group for user group permissions |
|
692 | 692 | user_group_from_user_group = Permission\ |
|
693 | 693 | .get_default_user_group_perms_from_user_group( |
|
694 | 694 | self.user_id, self.scope_user_group_id) |
|
695 | 695 | |
|
696 | 696 | multiple_counter = collections.defaultdict(int) |
|
697 | 697 | for perm in user_group_from_user_group: |
|
698 | 698 | g_k = perm.UserGroupUserGroupToPerm\ |
|
699 | 699 | .target_user_group.users_group_name |
|
700 | 700 | u_k = perm.UserGroupUserGroupToPerm\ |
|
701 | 701 | .user_group.users_group_name |
|
702 | 702 | o = PermOrigin.USERGROUP_USERGROUP % u_k |
|
703 | 703 | multiple_counter[g_k] += 1 |
|
704 | 704 | p = perm.Permission.permission_name |
|
705 | 705 | |
|
706 | 706 | if perm.UserGroup.user_id == self.user_id: |
|
707 | 707 | # set admin if owner, even for member of other user group |
|
708 | 708 | p = 'usergroup.admin' |
|
709 | 709 | o = PermOrigin.USERGROUP_OWNER |
|
710 | 710 | else: |
|
711 | 711 | if multiple_counter[g_k] > 1: |
|
712 | 712 | cur_perm = self.permissions_user_groups[g_k] |
|
713 | 713 | p = self._choose_permission(p, cur_perm) |
|
714 | 714 | self.permissions_user_groups[g_k] = p, o |
|
715 | 715 | |
|
716 | 716 | # user explicit permission for user groups |
|
717 | 717 | user_user_groups_perms = Permission.get_default_user_group_perms( |
|
718 | 718 | self.user_id, self.scope_user_group_id) |
|
719 | 719 | for perm in user_user_groups_perms: |
|
720 | 720 | ug_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
721 | 721 | u_k = perm.UserUserGroupToPerm.user.username |
|
722 | 722 | o = PermOrigin.USERGROUP_USER % u_k |
|
723 | 723 | |
|
724 | 724 | if perm.UserGroup.user_id == self.user_id: |
|
725 | 725 | # set admin if owner |
|
726 | 726 | p = 'usergroup.admin' |
|
727 | 727 | o = PermOrigin.USERGROUP_OWNER |
|
728 | 728 | else: |
|
729 | 729 | p = perm.Permission.permission_name |
|
730 | 730 | if not self.explicit: |
|
731 | 731 | cur_perm = self.permissions_user_groups.get( |
|
732 | 732 | ug_k, 'usergroup.none') |
|
733 | 733 | p = self._choose_permission(p, cur_perm) |
|
734 | 734 | self.permissions_user_groups[ug_k] = p, o |
|
735 | 735 | |
|
736 | 736 | def _choose_permission(self, new_perm, cur_perm): |
|
737 | 737 | new_perm_val = Permission.PERM_WEIGHTS[new_perm] |
|
738 | 738 | cur_perm_val = Permission.PERM_WEIGHTS[cur_perm] |
|
739 | 739 | if self.algo == 'higherwin': |
|
740 | 740 | if new_perm_val > cur_perm_val: |
|
741 | 741 | return new_perm |
|
742 | 742 | return cur_perm |
|
743 | 743 | elif self.algo == 'lowerwin': |
|
744 | 744 | if new_perm_val < cur_perm_val: |
|
745 | 745 | return new_perm |
|
746 | 746 | return cur_perm |
|
747 | 747 | |
|
748 | 748 | def _permission_structure(self): |
|
749 | 749 | return { |
|
750 | 750 | 'global': self.permissions_global, |
|
751 | 751 | 'repositories': self.permissions_repositories, |
|
752 | 752 | 'repositories_groups': self.permissions_repository_groups, |
|
753 | 753 | 'user_groups': self.permissions_user_groups, |
|
754 | 754 | } |
|
755 | 755 | |
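To make the `higherwin`/`lowerwin` resolution used by `_choose_permission` concrete, here is a small standalone sketch; the weight values are illustrative only, the authoritative mapping is `Permission.PERM_WEIGHTS` in `rhodecode.model.db`:

    # illustrative weights; the real ones live on the Permission db model
    PERM_WEIGHTS = {'repository.none': 0, 'repository.read': 1,
                    'repository.write': 3, 'repository.admin': 4}

    def choose_permission(algo, new_perm, cur_perm):
        new_val, cur_val = PERM_WEIGHTS[new_perm], PERM_WEIGHTS[cur_perm]
        if algo == 'higherwin':
            return new_perm if new_val > cur_val else cur_perm
        return new_perm if new_val < cur_val else cur_perm   # lowerwin

    # a user is in two groups granting read and write on the same repo
    assert choose_permission('higherwin', 'repository.write',
                             'repository.read') == 'repository.write'
    assert choose_permission('lowerwin', 'repository.write',
                             'repository.read') == 'repository.read'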
|
756 | 756 | |
|
757 | 757 | def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None): |
|
758 | 758 | """ |
|
759 | 759 | Check if given controller_name is in whitelist of auth token access |
|
760 | 760 | """ |
|
761 | 761 | if not whitelist: |
|
762 | 762 | from rhodecode import CONFIG |
|
763 | 763 | whitelist = aslist( |
|
764 | 764 | CONFIG.get('api_access_controllers_whitelist'), sep=',') |
|
765 | 765 | log.debug( |
|
766 | 766 | 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,)) |
|
767 | 767 | |
|
768 | 768 | auth_token_access_valid = False |
|
769 | 769 | for entry in whitelist: |
|
770 | 770 | if fnmatch.fnmatch(controller_name, entry): |
|
771 | 771 | auth_token_access_valid = True |
|
772 | 772 | break |
|
773 | 773 | |
|
774 | 774 | if auth_token_access_valid: |
|
775 | 775 | log.debug('controller:%s matches entry in whitelist' |
|
776 | 776 | % (controller_name,)) |
|
777 | 777 | else: |
|
778 | 778 | msg = ('controller: %s does *NOT* match any entry in whitelist' |
|
779 | 779 | % (controller_name,)) |
|
780 | 780 | if auth_token: |
|
781 | 781 | # if we use auth token key and don't have access it's a warning |
|
782 | 782 | log.warning(msg) |
|
783 | 783 | else: |
|
784 | 784 | log.debug(msg) |
|
785 | 785 | |
|
786 | 786 | return auth_token_access_valid |
|
787 | 787 | |
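The whitelist matching above is plain `fnmatch` on a `Controller:action` style name, so entries in `api_access_controllers_whitelist` behave like shell patterns; the controller names below are purely illustrative:

    import fnmatch

    whitelist = ['ChangesetController:changeset_raw', 'FilesController:*']
    assert fnmatch.fnmatch('ChangesetController:changeset_raw', whitelist[0])
    assert fnmatch.fnmatch('FilesController:raw', whitelist[1])
    assert not fnmatch.fnmatch('SummaryController:index', whitelist[0])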
|
788 | 788 | |
|
789 | 789 | class AuthUser(object): |
|
790 | 790 | """ |
|
791 | 791 | A simple object that handles all attributes of user in RhodeCode |
|
792 | 792 | |
|
793 | 793 | It does a lookup based on API key, given user, or user present in the session.

794 | 794 | Then it fills in all required information for such a user. It also checks if

795 | 795 | anonymous access is enabled and, if so, returns the default user as logged in.
|
796 | 796 | """ |
|
797 | 797 | GLOBAL_PERMS = [x[0] for x in Permission.PERMS] |
|
798 | 798 | |
|
799 | 799 | def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): |
|
800 | 800 | |
|
801 | 801 | self.user_id = user_id |
|
802 | 802 | self._api_key = api_key |
|
803 | 803 | |
|
804 | 804 | self.api_key = None |
|
805 | 805 | self.feed_token = '' |
|
806 | 806 | self.username = username |
|
807 | 807 | self.ip_addr = ip_addr |
|
808 | 808 | self.name = '' |
|
809 | 809 | self.lastname = '' |
|
810 | 810 | self.email = '' |
|
811 | 811 | self.is_authenticated = False |
|
812 | 812 | self.admin = False |
|
813 | 813 | self.inherit_default_permissions = False |
|
814 | 814 | self.password = '' |
|
815 | 815 | |
|
816 | 816 | self.anonymous_user = None # propagated on propagate_data |
|
817 | 817 | self.propagate_data() |
|
818 | 818 | self._instance = None |
|
819 | 819 | self._permissions_scoped_cache = {} # used to bind scoped calculation |
|
820 | 820 | |
|
821 | 821 | @LazyProperty |
|
822 | 822 | def permissions(self): |
|
823 | 823 | return self.get_perms(user=self, cache=False) |
|
824 | 824 | |
|
825 | 825 | def permissions_with_scope(self, scope): |
|
826 | 826 | """ |
|
827 | 827 | Call the get_perms function with scoped data. The scope in that function |
|
828 | 828 | narrows the SQL calls to the given IDs of objects, resulting in fetching

829 | 829 | just the particular permissions we want to obtain. If scope is an empty dict

830 | 830 | then it basically narrows the scope to GLOBAL permissions only.
|
831 | 831 | |
|
832 | 832 | :param scope: dict |
|
833 | 833 | """ |
|
834 | 834 | if 'repo_name' in scope: |
|
835 | 835 | obj = Repository.get_by_repo_name(scope['repo_name']) |
|
836 | 836 | if obj: |
|
837 | 837 | scope['repo_id'] = obj.repo_id |
|
838 | 838 | _scope = { |
|
839 | 839 | 'repo_id': -1, |
|
840 | 840 | 'user_group_id': -1, |
|
841 | 841 | 'repo_group_id': -1, |
|
842 | 842 | } |
|
843 | 843 | _scope.update(scope) |
|
844 | 844 | cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b, |
|
845 | 845 | _scope.items()))) |
|
846 | 846 | if cache_key not in self._permissions_scoped_cache: |
|
847 | 847 | # store in cache to mimic how the @LazyProperty works, |
|
848 | 848 | # the difference here is that we use the unique key calculated |
|
849 | 849 | # from params and values |
|
850 | 850 | res = self.get_perms(user=self, cache=False, scope=_scope) |
|
851 | 851 | self._permissions_scoped_cache[cache_key] = res |
|
852 | 852 | return self._permissions_scoped_cache[cache_key] |
|
853 | 853 | |
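A scoped call narrows the permission tree to a single object; a sketch, assuming `auth_user` is a loaded `AuthUser` and that a repository named `some/repo` exists (both names are hypothetical):

    perms = auth_user.permissions_with_scope({'repo_name': 'some/repo'})
    perms['repositories'].get('some/repo')    # e.g. 'repository.read'

    # an empty scope dict narrows the lookup to GLOBAL permissions only
    global_perms = auth_user.permissions_with_scope({})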
|
854 | 854 | def get_instance(self): |
|
855 | 855 | return User.get(self.user_id) |
|
856 | 856 | |
|
857 | 857 | def update_lastactivity(self): |
|
858 | 858 | if self.user_id: |
|
859 | 859 | User.get(self.user_id).update_lastactivity() |
|
860 | 860 | |
|
861 | 861 | def propagate_data(self): |
|
862 | 862 | """ |
|
863 | 863 | Fills in user data and propagates values to this instance. Maps fetched |
|
864 | 864 | user attributes to this class instance attributes |
|
865 | 865 | """ |
|
866 | 866 | log.debug('starting data propagation for new potential AuthUser') |
|
867 | 867 | user_model = UserModel() |
|
868 | 868 | anon_user = self.anonymous_user = User.get_default_user(cache=True) |
|
869 | 869 | is_user_loaded = False |
|
870 | 870 | |
|
871 | 871 | # lookup by userid |
|
872 | 872 | if self.user_id is not None and self.user_id != anon_user.user_id: |
|
873 | 873 | log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id) |
|
874 | 874 | is_user_loaded = user_model.fill_data(self, user_id=self.user_id) |
|
875 | 875 | |
|
876 | 876 | # try to get user by api key
|
877 | 877 | elif self._api_key and self._api_key != anon_user.api_key: |
|
878 | 878 | log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key) |
|
879 | 879 | is_user_loaded = user_model.fill_data(self, api_key=self._api_key) |
|
880 | 880 | |
|
881 | 881 | # lookup by username |
|
882 | 882 | elif self.username: |
|
883 | 883 | log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username) |
|
884 | 884 | is_user_loaded = user_model.fill_data(self, username=self.username) |
|
885 | 885 | else: |
|
886 | 886 | log.debug('No data in %s that could have been used to log in' % self)
|
887 | 887 | |
|
888 | 888 | if not is_user_loaded: |
|
889 | 889 | log.debug('Failed to load user. Fallback to default user') |
|
890 | 890 | # if we cannot authenticate user try anonymous |
|
891 | 891 | if anon_user.active: |
|
892 | 892 | user_model.fill_data(self, user_id=anon_user.user_id) |
|
893 | 893 | # then we set this user is logged in |
|
894 | 894 | self.is_authenticated = True |
|
895 | 895 | else: |
|
896 | 896 | # in case of disabled anonymous user we reset some of the |
|
897 | 897 | # parameters so such user is "corrupted", skipping the fill_data |
|
898 | 898 | for attr in ['user_id', 'username', 'admin', 'active']: |
|
899 | 899 | setattr(self, attr, None) |
|
900 | 900 | self.is_authenticated = False |
|
901 | 901 | |
|
902 | 902 | if not self.username: |
|
903 | 903 | self.username = 'None' |
|
904 | 904 | |
|
905 | 905 | log.debug('Auth User is now %s' % self) |
|
906 | 906 | |
|
907 | 907 | def get_perms(self, user, scope=None, explicit=True, algo='higherwin', |
|
908 | 908 | cache=False): |
|
909 | 909 | """ |
|
910 | 910 | Fills the user permission attribute with permissions taken from the database;

911 | 911 | works for permissions given for repositories, and for permissions that

912 | 912 | are granted to groups
|
913 | 913 | |
|
914 | 914 | :param user: instance of User object from database |
|
915 | 915 | :param explicit: In case there are permissions both for the user and a group

916 | 916 | the user is part of, the explicit flag defines whether the user

917 | 917 | explicitly overrides permissions from the group; if it's False the

918 | 918 | decision is made based on the algo

919 | 919 | :param algo: algorithm used to decide which permission should be chosen if

920 | 920 | multiple are defined, e.g. the user is in two different groups. It also

921 | 921 | decides, when the explicit flag is turned off, how to pick the permission

922 | 922 | in case the user is in a group and also has a separate permission defined
|
923 | 923 | """ |
|
924 | 924 | user_id = user.user_id |
|
925 | 925 | user_is_admin = user.is_admin |
|
926 | 926 | |
|
927 | 927 | # inheritance of global permissions like create repo/fork repo etc |
|
928 | 928 | user_inherit_default_permissions = user.inherit_default_permissions |
|
929 | 929 | |
|
930 | 930 | log.debug('Computing PERMISSION tree for scope %s' % (scope, )) |
|
931 | 931 | compute = caches.conditional_cache( |
|
932 | 932 | 'short_term', 'cache_desc', |
|
933 | 933 | condition=cache, func=_cached_perms_data) |
|
934 | 934 | result = compute(user_id, scope, user_is_admin, |
|
935 | 935 | user_inherit_default_permissions, explicit, algo) |
|
936 | 936 | |
|
937 | 937 | result_repr = [] |
|
938 | 938 | for k in result: |
|
939 | 939 | result_repr.append((k, len(result[k]))) |
|
940 | 940 | |
|
941 | 941 | log.debug('PERMISSION tree computed %s' % (result_repr,)) |
|
942 | 942 | return result |
|
943 | 943 | |
|
944 | 944 | @property |
|
945 | 945 | def is_default(self): |
|
946 | 946 | return self.username == User.DEFAULT_USER |
|
947 | 947 | |
|
948 | 948 | @property |
|
949 | 949 | def is_admin(self): |
|
950 | 950 | return self.admin |
|
951 | 951 | |
|
952 | 952 | @property |
|
953 | 953 | def is_user_object(self): |
|
954 | 954 | return self.user_id is not None |
|
955 | 955 | |
|
956 | 956 | @property |
|
957 | 957 | def repositories_admin(self): |
|
958 | 958 | """ |
|
959 | 959 | Returns list of repositories you're an admin of |
|
960 | 960 | """ |
|
961 | 961 | return [ |
|
962 | 962 | x[0] for x in self.permissions['repositories'].iteritems() |
|
963 | 963 | if x[1] == 'repository.admin'] |
|
964 | 964 | |
|
965 | 965 | @property |
|
966 | 966 | def repository_groups_admin(self): |
|
967 | 967 | """ |
|
968 | 968 | Returns list of repository groups you're an admin of |
|
969 | 969 | """ |
|
970 | 970 | return [ |
|
971 | 971 | x[0] for x in self.permissions['repositories_groups'].iteritems() |
|
972 | 972 | if x[1] == 'group.admin'] |
|
973 | 973 | |
|
974 | 974 | @property |
|
975 | 975 | def user_groups_admin(self): |
|
976 | 976 | """ |
|
977 | 977 | Returns list of user groups you're an admin of |
|
978 | 978 | """ |
|
979 | 979 | return [ |
|
980 | 980 | x[0] for x in self.permissions['user_groups'].iteritems() |
|
981 | 981 | if x[1] == 'usergroup.admin'] |
|
982 | 982 | |
|
983 | 983 | @property |
|
984 | 984 | def ip_allowed(self): |
|
985 | 985 | """ |
|
986 | 986 | Checks if ip_addr used in constructor is allowed from defined list of |
|
987 | 987 | allowed ip_addresses for user |
|
988 | 988 | |
|
989 | 989 | :returns: boolean, True if ip is in allowed ip range |
|
990 | 990 | """ |
|
991 | 991 | # check IP |
|
992 | 992 | inherit = self.inherit_default_permissions |
|
993 | 993 | return AuthUser.check_ip_allowed(self.user_id, self.ip_addr, |
|
994 | 994 | inherit_from_default=inherit) |
|
995 | 995 | @property |
|
996 | 996 | def personal_repo_group(self): |
|
997 | 997 | return RepoGroup.get_user_personal_repo_group(self.user_id) |
|
998 | 998 | |
|
999 | 999 | @classmethod |
|
1000 | 1000 | def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default): |
|
1001 | 1001 | allowed_ips = AuthUser.get_allowed_ips( |
|
1002 | 1002 | user_id, cache=True, inherit_from_default=inherit_from_default) |
|
1003 | 1003 | if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips): |
|
1004 | 1004 | log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips)) |
|
1005 | 1005 | return True |
|
1006 | 1006 | else: |
|
1007 | 1007 | log.info('Access for IP:%s forbidden, ' |
|
1008 | 1008 | 'not in %s' % (ip_addr, allowed_ips)) |
|
1009 | 1009 | return False |
|
1010 | 1010 | |
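A sketch of how the classmethod above is typically used; the user id and the allowed network are made-up values and assume a matching `UserIpMap` entry has been configured:

    # with 192.168.0.0/24 configured as an allowed range for the default user
    AuthUser.check_ip_allowed(user_id=2, ip_addr='192.168.0.10',
                              inherit_from_default=True)   # -> True
    AuthUser.check_ip_allowed(user_id=2, ip_addr='10.0.0.1',
                              inherit_from_default=True)   # -> False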
|
1011 | 1011 | def __repr__(self): |
|
1012 | 1012 | return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\ |
|
1013 | 1013 | % (self.user_id, self.username, self.ip_addr, self.is_authenticated) |
|
1014 | 1014 | |
|
1015 | 1015 | def set_authenticated(self, authenticated=True): |
|
1016 | 1016 | if self.user_id != self.anonymous_user.user_id: |
|
1017 | 1017 | self.is_authenticated = authenticated |
|
1018 | 1018 | |
|
1019 | 1019 | def get_cookie_store(self): |
|
1020 | 1020 | return { |
|
1021 | 1021 | 'username': self.username, |
|
1022 | 1022 | 'password': md5(self.password), |
|
1023 | 1023 | 'user_id': self.user_id, |
|
1024 | 1024 | 'is_authenticated': self.is_authenticated |
|
1025 | 1025 | } |
|
1026 | 1026 | |
|
1027 | 1027 | @classmethod |
|
1028 | 1028 | def from_cookie_store(cls, cookie_store): |
|
1029 | 1029 | """ |
|
1030 | 1030 | Creates AuthUser from a cookie store |
|
1031 | 1031 | |
|
1032 | 1032 | :param cls: |
|
1033 | 1033 | :param cookie_store: |
|
1034 | 1034 | """ |
|
1035 | 1035 | user_id = cookie_store.get('user_id') |
|
1036 | 1036 | username = cookie_store.get('username') |
|
1037 | 1037 | api_key = cookie_store.get('api_key') |
|
1038 | 1038 | return AuthUser(user_id, api_key, username) |
|
1039 | 1039 | |
|
1040 | 1040 | @classmethod |
|
1041 | 1041 | def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False): |
|
1042 | 1042 | _set = set() |
|
1043 | 1043 | |
|
1044 | 1044 | if inherit_from_default: |
|
1045 | 1045 | default_ips = UserIpMap.query().filter( |
|
1046 | 1046 | UserIpMap.user == User.get_default_user(cache=True)) |
|
1047 | 1047 | if cache: |
|
1048 | 1048 | default_ips = default_ips.options( |
|
1049 | 1049 | FromCache("sql_cache_short", "get_user_ips_default")) |
|
1050 | 1050 | |
|
1051 | 1051 | # populate from default user |
|
1052 | 1052 | for ip in default_ips: |
|
1053 | 1053 | try: |
|
1054 | 1054 | _set.add(ip.ip_addr) |
|
1055 | 1055 | except ObjectDeletedError: |
|
1056 | 1056 | # since we use heavy caching sometimes it happens that |
|
1057 | 1057 | # we get deleted objects here, we just skip them |
|
1058 | 1058 | pass |
|
1059 | 1059 | |
|
1060 | 1060 | user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) |
|
1061 | 1061 | if cache: |
|
1062 | 1062 | user_ips = user_ips.options( |
|
1063 | 1063 | FromCache("sql_cache_short", "get_user_ips_%s" % user_id)) |
|
1064 | 1064 | |
|
1065 | 1065 | for ip in user_ips: |
|
1066 | 1066 | try: |
|
1067 | 1067 | _set.add(ip.ip_addr) |
|
1068 | 1068 | except ObjectDeletedError: |
|
1069 | 1069 | # since we use heavy caching sometimes it happens that we get |
|
1070 | 1070 | # deleted objects here, we just skip them |
|
1071 | 1071 | pass |
|
1072 | 1072 | return _set or set(['0.0.0.0/0', '::/0']) |
|
1073 | 1073 | |
|
1074 | 1074 | |
|
1075 | 1075 | def set_available_permissions(config): |
|
1076 | 1076 | """ |
|
1077 | 1077 | This function will populate the pylons globals with all available

1078 | 1078 | permissions defined in the db. We don't want to check the db each time for new

1079 | 1079 | permissions, since adding a new permission also requires an application restart,

1080 | 1080 | i.e. to decorate new views with the newly created permission
|
1081 | 1081 | |
|
1082 | 1082 | :param config: current pylons config instance |
|
1083 | 1083 | |
|
1084 | 1084 | """ |
|
1085 | 1085 | log.info('getting information about all available permissions') |
|
1086 | 1086 | try: |
|
1087 | 1087 | sa = meta.Session |
|
1088 | 1088 | all_perms = sa.query(Permission).all() |
|
1089 | 1089 | config['available_permissions'] = [x.permission_name for x in all_perms] |
|
1090 | 1090 | except Exception: |
|
1091 | 1091 | log.error(traceback.format_exc()) |
|
1092 | 1092 | finally: |
|
1093 | 1093 | meta.Session.remove() |
|
1094 | 1094 | |
|
1095 | 1095 | |
|
1096 | 1096 | def get_csrf_token(session=None, force_new=False, save_if_missing=True): |
|
1097 | 1097 | """ |
|
1098 | 1098 | Return the current authentication token, creating one if one doesn't |
|
1099 | 1099 | already exist and the save_if_missing flag is present. |
|
1100 | 1100 | |
|
1101 | 1101 | :param session: pass in the pylons session, else we use the global ones |
|
1102 | 1102 | :param force_new: force to re-generate the token and store it in session |
|
1103 | 1103 | :param save_if_missing: save the newly generated token if it's missing in |
|
1104 | 1104 | session |
|
1105 | 1105 | """ |
|
1106 | 1106 | if not session: |
|
1107 | 1107 | from pylons import session |
|
1108 | 1108 | |
|
1109 | 1109 | if (csrf_token_key not in session and save_if_missing) or force_new: |
|
1110 | 1110 | token = hashlib.sha1(str(random.getrandbits(128))).hexdigest() |
|
1111 | 1111 | session[csrf_token_key] = token |
|
1112 | 1112 | if hasattr(session, 'save'): |
|
1113 | 1113 | session.save() |
|
1114 | 1114 | return session.get(csrf_token_key) |
|
1115 | 1115 | |
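Because the session behaves like a mapping, the helper can be sketched against any non-empty dict (an empty or missing session makes it fall back to the global pylons session, which additionally exposes `save()`):

    session = {'lang': 'en'}   # stand-in for the pylons session
    token = get_csrf_token(session=session)           # generated and stored
    assert session[csrf_token_key] == token
    assert get_csrf_token(session=session) == token   # reused on later calls
    assert get_csrf_token(session=session, force_new=True) != token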
|
1116 | 1116 | |
|
1117 | 1117 | # CHECK DECORATORS |
|
1118 | 1118 | class CSRFRequired(object): |
|
1119 | 1119 | """ |
|
1120 | 1120 | Decorator for authenticating a form |
|
1121 | 1121 | |
|
1122 | 1122 | This decorator uses an authorization token stored in the client's |
|
1123 | 1123 | session for prevention of certain Cross-site request forgery (CSRF) |
|
1124 | 1124 | attacks (See |
|
1125 | 1125 | http://en.wikipedia.org/wiki/Cross-site_request_forgery for more |
|
1126 | 1126 | information). |
|
1127 | 1127 | |
|
1128 | 1128 | For use with the ``webhelpers.secure_form`` helper functions. |
|
1129 | 1129 | |
|
1130 | 1130 | """ |
|
1131 | 1131 | def __init__(self, token=csrf_token_key, header='X-CSRF-Token', |
|
1132 | 1132 | except_methods=None): |
|
1133 | 1133 | self.token = token |
|
1134 | 1134 | self.header = header |
|
1135 | 1135 | self.except_methods = except_methods or [] |
|
1136 | 1136 | |
|
1137 | 1137 | def __call__(self, func): |
|
1138 | 1138 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1139 | 1139 | |
|
1140 | 1140 | def _get_csrf(self, _request): |
|
1141 | 1141 | return _request.POST.get(self.token, _request.headers.get(self.header)) |
|
1142 | 1142 | |
|
1143 | 1143 | def check_csrf(self, _request, cur_token): |
|
1144 | 1144 | supplied_token = self._get_csrf(_request) |
|
1145 | 1145 | return supplied_token and supplied_token == cur_token |
|
1146 | 1146 | |
|
1147 | 1147 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1148 | 1148 | if request.method in self.except_methods: |
|
1149 | 1149 | return func(*fargs, **fkwargs) |
|
1150 | 1150 | |
|
1151 | 1151 | cur_token = get_csrf_token(save_if_missing=False) |
|
1152 | 1152 | if self.check_csrf(request, cur_token): |
|
1153 | 1153 | if request.POST.get(self.token): |
|
1154 | 1154 | del request.POST[self.token] |
|
1155 | 1155 | return func(*fargs, **fkwargs) |
|
1156 | 1156 | else: |
|
1157 | 1157 | reason = 'token-missing' |
|
1158 | 1158 | supplied_token = self._get_csrf(request) |
|
1159 | 1159 | if supplied_token and cur_token != supplied_token: |
|
1160 | 1160 | reason = 'token-mismatch [%s:%s]' % ((cur_token or '')[:6],

1161 | 1161 | (supplied_token or '')[:6])
|
1162 | 1162 | |
|
1163 | 1163 | csrf_message = \ |
|
1164 | 1164 | ("Cross-site request forgery detected, request denied. See " |
|
1165 | 1165 | "http://en.wikipedia.org/wiki/Cross-site_request_forgery for " |
|
1166 | 1166 | "more information.") |
|
1167 | 1167 | log.warn('Cross-site request forgery detected, request %r DENIED: %s ' |
|
1168 | 1168 | 'REMOTE_ADDR:%s, HEADERS:%s' % ( |
|
1169 | 1169 | request, reason, request.remote_addr, request.headers)) |
|
1170 | 1170 | |
|
1171 | 1171 | raise HTTPForbidden(explanation=csrf_message) |
|
1172 | 1172 | |
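In a controller, the decorator is applied to state-changing actions, with the matching hidden field produced by the `webhelpers.secure_form` helpers mentioned in the docstring. A sketch only; the controller and action names are made up:

    class MySettingsController(BaseController):   # hypothetical controller

        @LoginRequired()
        @CSRFRequired(except_methods=['GET'])
        def update(self):
            # reached only when the POSTed csrf_token (or the X-CSRF-Token
            # header) matches the token stored in the user's session
            pass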
|
1173 | 1173 | |
|
1174 | 1174 | class LoginRequired(object): |
|
1175 | 1175 | """ |
|
1176 | 1176 | Must be logged in to execute this function, else

1177 | 1177 | redirect to the login page

1178 | 1178 |

1179 | 1179 | :param auth_token_access: if enabled this checks only for a valid auth token

1180 | 1180 | and grants access based on that token
|
1181 | 1181 | """ |
|
1182 | 1182 | def __init__(self, auth_token_access=None): |
|
1183 | 1183 | self.auth_token_access = auth_token_access |
|
1184 | 1184 | |
|
1185 | 1185 | def __call__(self, func): |
|
1186 | 1186 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1187 | 1187 | |
|
1188 | 1188 | def _get_request(self): |
|
1189 | 1189 | from pyramid.threadlocal import get_current_request |
|
1190 | 1190 | pyramid_request = get_current_request() |
|
1191 | 1191 | if not pyramid_request: |
|
1192 | 1192 | # return global request of pylons in case pyramid isn't available |
|
1193 | 1193 | return request |
|
1194 | 1194 | return pyramid_request |
|
1195 | 1195 | |
|
1196 | 1196 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1197 | 1197 | from rhodecode.lib import helpers as h |
|
1198 | 1198 | cls = fargs[0] |
|
1199 | 1199 | user = cls._rhodecode_user |
|
1200 | 1200 | request = self._get_request() |
|
1201 | 1201 | |
|
1202 | 1202 | loc = "%s:%s" % (cls.__class__.__name__, func.__name__) |
|
1203 | 1203 | log.debug('Starting login restriction checks for user: %s' % (user,)) |
|
1204 | 1204 | # check if our IP is allowed |
|
1205 | 1205 | ip_access_valid = True |
|
1206 | 1206 | if not user.ip_allowed: |
|
1207 | 1207 | h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))), |
|
1208 | 1208 | category='warning') |
|
1209 | 1209 | ip_access_valid = False |
|
1210 | 1210 | |
|
1211 | 1211 | # check if we used an APIKEY and it's a valid one |
|
1212 | 1212 | # defined white-list of controllers which API access will be enabled |
|
1213 | 1213 | _auth_token = request.GET.get( |
|
1214 | 1214 | 'auth_token', '') or request.GET.get('api_key', '') |
|
1215 | 1215 | auth_token_access_valid = allowed_auth_token_access( |
|
1216 | 1216 | loc, auth_token=_auth_token) |
|
1217 | 1217 | |
|
1218 | 1218 | # explicit controller is enabled or API is in our whitelist |
|
1219 | 1219 | if self.auth_token_access or auth_token_access_valid: |
|
1220 | 1220 | log.debug('Checking AUTH TOKEN access for %s' % (cls,)) |
|
1221 | 1221 | db_user = user.get_instance() |
|
1222 | 1222 | |
|
1223 | 1223 | if db_user: |
|
1224 | 1224 | if self.auth_token_access: |
|
1225 | 1225 | roles = self.auth_token_access |
|
1226 | 1226 | else: |
|
1227 | 1227 | roles = [UserApiKeys.ROLE_HTTP] |
|
1228 | 1228 | token_match = db_user.authenticate_by_token( |
|
1229 | 1229 | _auth_token, roles=roles) |
|
1230 | 1230 | else: |
|
1231 | 1231 | log.debug('Unable to fetch db instance for auth user: %s', user) |
|
1232 | 1232 | token_match = False |
|
1233 | 1233 | |
|
1234 | 1234 | if _auth_token and token_match: |
|
1235 | 1235 | auth_token_access_valid = True |
|
1236 | 1236 | log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],)) |
|
1237 | 1237 | else: |
|
1238 | 1238 | auth_token_access_valid = False |
|
1239 | 1239 | if not _auth_token: |
|
1240 | 1240 | log.debug("AUTH TOKEN *NOT* present in request") |
|
1241 | 1241 | else: |
|
1242 | 1242 | log.warning( |
|
1243 | 1243 | "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:]) |
|
1244 | 1244 | |
|
1245 | 1245 | log.debug('Checking if %s is authenticated @ %s' % (user.username, loc)) |
|
1246 | 1246 | reason = 'RHODECODE_AUTH' if user.is_authenticated \ |
|
1247 | 1247 | else 'AUTH_TOKEN_AUTH' |
|
1248 | 1248 | |
|
1249 | 1249 | if ip_access_valid and ( |
|
1250 | 1250 | user.is_authenticated or auth_token_access_valid): |
|
1251 | 1251 | log.info( |
|
1252 | 1252 | 'user %s authenticating with:%s IS authenticated on func %s' |
|
1253 | 1253 | % (user, reason, loc)) |
|
1254 | 1254 | |
|
1255 | 1255 | # update user data to check last activity |
|
1256 | 1256 | user.update_lastactivity() |
|
1257 | 1257 | Session().commit() |
|
1258 | 1258 | return func(*fargs, **fkwargs) |
|
1259 | 1259 | else: |
|
1260 | 1260 | log.warning( |
|
1261 | 1261 | 'user %s authenticating with:%s NOT authenticated on ' |
|
1262 | 1262 | 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s' |
|
1263 | 1263 | % (user, reason, loc, ip_access_valid, |
|
1264 | 1264 | auth_token_access_valid)) |
|
1265 | 1265 | # we preserve the get PARAM |
|
1266 | 1266 | came_from = request.path_qs |
|
1267 | 1267 | log.debug('redirecting to login page with %s' % (came_from,)) |
|
1268 | 1268 | return redirect( |
|
1269 | 1269 | h.route_path('login', _query={'came_from': came_from})) |
|
1270 | 1270 | |
|
1271 | 1271 | |
|
1272 | 1272 | class NotAnonymous(object): |
|
1273 | 1273 | """ |
|
1274 | 1274 | Must be logged in to execute this function, else

1275 | 1275 | redirect to the login page
|
1276 | 1276 | """ |
|
1277 | 1277 | |
|
1278 | 1278 | def __call__(self, func): |
|
1279 | 1279 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1280 | 1280 | |
|
1281 | 1281 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1282 | 1282 | import rhodecode.lib.helpers as h |
|
1283 | 1283 | cls = fargs[0] |
|
1284 | 1284 | self.user = cls._rhodecode_user |
|
1285 | 1285 | |
|
1286 | 1286 | log.debug('Checking if user is not anonymous @%s' % cls) |
|
1287 | 1287 | |
|
1288 | 1288 | anonymous = self.user.username == User.DEFAULT_USER |
|
1289 | 1289 | |
|
1290 | 1290 | if anonymous: |
|
1291 | 1291 | came_from = request.path_qs |
|
1292 | 1292 | h.flash(_('You need to be a registered user to ' |
|
1293 | 1293 | 'perform this action'), |
|
1294 | 1294 | category='warning') |
|
1295 | 1295 | return redirect( |
|
1296 | 1296 | h.route_path('login', _query={'came_from': came_from})) |
|
1297 | 1297 | else: |
|
1298 | 1298 | return func(*fargs, **fkwargs) |
|
1299 | 1299 | |
|
1300 | 1300 | |
|
1301 | 1301 | class XHRRequired(object): |
|
1302 | 1302 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1303 | 1303 | |
|
1304 | 1304 | def __call__(self, func): |
|
1305 | 1305 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1306 | 1306 | |
|
1307 | 1307 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1308 | 1308 | log.debug('Checking if request is XMLHttpRequest (XHR)') |
|
1309 | 1309 | xhr_message = 'This is not a valid XMLHttpRequest (XHR) request' |
|
1310 | 1310 | if not request.is_xhr: |
|
1311 | 1311 | abort(400, detail=xhr_message) |
|
1312 | 1312 | |
|
1313 | 1313 | return func(*fargs, **fkwargs) |
|
1314 | 1314 | |
|
1315 | 1315 | |
|
1316 | 1316 | class HasAcceptedRepoType(object): |
|
1317 | 1317 | """ |
|
1318 | 1318 | Check if requested repo is within given repo type aliases |
|
1319 | 1319 | """ |
|
1320 | 1320 | |
|
1321 | 1321 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1322 | 1322 | |
|
1323 | 1323 | def __init__(self, *repo_type_list): |
|
1324 | 1324 | self.repo_type_list = set(repo_type_list) |
|
1325 | 1325 | |
|
1326 | 1326 | def __call__(self, func): |
|
1327 | 1327 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1328 | 1328 | |
|
1329 | 1329 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1330 | 1330 | import rhodecode.lib.helpers as h |
|
1331 | 1331 | cls = fargs[0] |
|
1332 | 1332 | rhodecode_repo = cls.rhodecode_repo |
|
1333 | 1333 | |
|
1334 | 1334 | log.debug('%s checking repo type for %s in %s', |
|
1335 | 1335 | self.__class__.__name__, |
|
1336 | 1336 | rhodecode_repo.alias, self.repo_type_list) |
|
1337 | 1337 | |
|
1338 | 1338 | if rhodecode_repo.alias in self.repo_type_list: |
|
1339 | 1339 | return func(*fargs, **fkwargs) |
|
1340 | 1340 | else: |
|
1341 | 1341 | h.flash(h.literal( |
|
1342 | 1342 | _('Action not supported for %s.' % rhodecode_repo.alias)), |
|
1343 | 1343 | category='warning') |
|
1344 | 1344 | return redirect( |
|
1345 | 1345 | h.route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
|
1346 | 1346 | |
|
1347 | 1347 | |
|
1348 | 1348 | class PermsDecorator(object): |
|
1349 | 1349 | """ |
|
1350 | 1350 | Base class for controller decorators, we extract the current user from |
|
1351 | 1351 | the class itself, which has it stored in base controllers |
|
1352 | 1352 | """ |
|
1353 | 1353 | |
|
1354 | 1354 | def __init__(self, *required_perms): |
|
1355 | 1355 | self.required_perms = set(required_perms) |
|
1356 | 1356 | |
|
1357 | 1357 | def __call__(self, func): |
|
1358 | 1358 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1359 | 1359 | |
|
1360 | 1360 | def _get_request(self): |
|
1361 | 1361 | from pyramid.threadlocal import get_current_request |
|
1362 | 1362 | pyramid_request = get_current_request() |
|
1363 | 1363 | if not pyramid_request: |
|
1364 | 1364 | # return global request of pylons in case pyramid isn't available |
|
1365 | 1365 | return request |
|
1366 | 1366 | return pyramid_request |
|
1367 | 1367 | |
|
1368 | 1368 | def _get_came_from(self): |
|
1369 | 1369 | _request = self._get_request() |
|
1370 | 1370 | |
|
1371 | 1371 | # both pylons/pyramid has this attribute |
|
1372 | 1372 | return _request.path_qs |
|
1373 | 1373 | |
|
1374 | 1374 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1375 | 1375 | import rhodecode.lib.helpers as h |
|
1376 | 1376 | cls = fargs[0] |
|
1377 | 1377 | _user = cls._rhodecode_user |
|
1378 | 1378 | |
|
1379 | 1379 | log.debug('checking %s permissions %s for %s %s', |
|
1380 | 1380 | self.__class__.__name__, self.required_perms, cls, _user) |
|
1381 | 1381 | |
|
1382 | 1382 | if self.check_permissions(_user): |
|
1383 | 1383 | log.debug('Permission granted for %s %s', cls, _user) |
|
1384 | 1384 | return func(*fargs, **fkwargs) |
|
1385 | 1385 | |
|
1386 | 1386 | else: |
|
1387 | 1387 | log.debug('Permission denied for %s %s', cls, _user) |
|
1388 | 1388 | anonymous = _user.username == User.DEFAULT_USER |
|
1389 | 1389 | |
|
1390 | 1390 | if anonymous: |
|
1391 | 1391 | came_from = self._get_came_from() |
|
1392 | 1392 | h.flash(_('You need to be signed in to view this page'), |
|
1393 | 1393 | category='warning') |
|
1394 | 1394 | raise HTTPFound( |
|
1395 | 1395 | h.route_path('login', _query={'came_from': came_from})) |
|
1396 | 1396 | |
|
1397 | 1397 | else: |
|
1398 | 1398 | # redirect with forbidden ret code |
|
1399 | 1399 | raise HTTPForbidden() |
|
1400 | 1400 | |
|
1401 | 1401 | def check_permissions(self, user): |
|
1402 | 1402 | """Dummy function for overriding""" |
|
1403 | 1403 | raise NotImplementedError( |
|
1404 | 1404 | 'You have to write this function in child class') |
|
1405 | 1405 | |
|
1406 | 1406 | |
|
1407 | 1407 | class HasPermissionAllDecorator(PermsDecorator): |
|
1408 | 1408 | """ |
|
1409 | 1409 | Checks for access permission for all given predicates. All of them |
|
1410 | 1410 | have to be met in order to fulfill the request |
|
1411 | 1411 | """ |
|
1412 | 1412 | |
|
1413 | 1413 | def check_permissions(self, user): |
|
1414 | 1414 | perms = user.permissions_with_scope({}) |
|
1415 | 1415 | if self.required_perms.issubset(perms['global']): |
|
1416 | 1416 | return True |
|
1417 | 1417 | return False |
|
1418 | 1418 | |
|
1419 | 1419 | |
|
1420 | 1420 | class HasPermissionAnyDecorator(PermsDecorator): |
|
1421 | 1421 | """ |
|
1422 | 1422 | Checks for access permission for any of given predicates. In order to |
|
1423 | 1423 | fulfill the request any of the predicates must be met |
|
1424 | 1424 | """ |
|
1425 | 1425 | |
|
1426 | 1426 | def check_permissions(self, user): |
|
1427 | 1427 | perms = user.permissions_with_scope({}) |
|
1428 | 1428 | if self.required_perms.intersection(perms['global']): |
|
1429 | 1429 | return True |
|
1430 | 1430 | return False |
|
1431 | 1431 | |
|
1432 | 1432 | |
|
1433 | 1433 | class HasRepoPermissionAllDecorator(PermsDecorator): |
|
1434 | 1434 | """ |
|
1435 | 1435 | Checks for access permission for all given predicates for specific |
|
1436 | 1436 | repository. All of them have to be met in order to fulfill the request |
|
1437 | 1437 | """ |
|
1438 | 1438 | def _get_repo_name(self): |
|
1439 | 1439 | _request = self._get_request() |
|
1440 | 1440 | return get_repo_slug(_request) |
|
1441 | 1441 | |
|
1442 | 1442 | def check_permissions(self, user): |
|
1443 | 1443 | perms = user.permissions |
|
1444 | 1444 | repo_name = self._get_repo_name() |
|
1445 | 1445 | |
|
1446 | 1446 | try: |
|
1447 | 1447 | user_perms = set([perms['repositories'][repo_name]]) |
|
1448 | 1448 | except KeyError: |
|
1449 | 1449 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1450 | 1450 | repo_name) |
|
1451 | 1451 | return False |
|
1452 | 1452 | |
|
1453 | 1453 | log.debug('checking `%s` permissions for repo `%s`', |
|
1454 | 1454 | user_perms, repo_name) |
|
1455 | 1455 | if self.required_perms.issubset(user_perms): |
|
1456 | 1456 | return True |
|
1457 | 1457 | return False |
|
1458 | 1458 | |
|
1459 | 1459 | |
|
1460 | 1460 | class HasRepoPermissionAnyDecorator(PermsDecorator): |
|
1461 | 1461 | """ |
|
1462 | 1462 | Checks for access permission for any of given predicates for specific |
|
1463 | 1463 | repository. In order to fulfill the request any of the predicates must be met |
|
1464 | 1464 | """ |
|
1465 | 1465 | def _get_repo_name(self): |
|
1466 | 1466 | _request = self._get_request() |
|
1467 | 1467 | return get_repo_slug(_request) |
|
1468 | 1468 | |
|
1469 | 1469 | def check_permissions(self, user): |
|
1470 | 1470 | perms = user.permissions |
|
1471 | 1471 | repo_name = self._get_repo_name() |
|
1472 | 1472 | |
|
1473 | 1473 | try: |
|
1474 | 1474 | user_perms = set([perms['repositories'][repo_name]]) |
|
1475 | 1475 | except KeyError: |
|
1476 | 1476 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1477 | 1477 | repo_name) |
|
1478 | 1478 | return False |
|
1479 | 1479 | |
|
1480 | 1480 | log.debug('checking `%s` permissions for repo `%s`', |
|
1481 | 1481 | user_perms, repo_name) |
|
1482 | 1482 | if self.required_perms.intersection(user_perms): |
|
1483 | 1483 | return True |
|
1484 | 1484 | return False |
|
1485 | 1485 | |
|
1486 | 1486 | |
|
1487 | 1487 | class HasRepoGroupPermissionAllDecorator(PermsDecorator): |
|
1488 | 1488 | """ |
|
1489 | 1489 | Checks for access permission for all given predicates for specific |
|
1490 | 1490 | repository group. All of them have to be met in order to |
|
1491 | 1491 | fulfill the request |
|
1492 | 1492 | """ |
|
1493 | 1493 | def _get_repo_group_name(self): |
|
1494 | 1494 | _request = self._get_request() |
|
1495 | 1495 | return get_repo_group_slug(_request) |
|
1496 | 1496 | |
|
1497 | 1497 | def check_permissions(self, user): |
|
1498 | 1498 | perms = user.permissions |
|
1499 | 1499 | group_name = self._get_repo_group_name() |
|
1500 | 1500 | try: |
|
1501 | 1501 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1502 | 1502 | except KeyError: |
|
1503 | 1503 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1504 | 1504 | group_name) |
|
1505 | 1505 | return False |
|
1506 | 1506 | |
|
1507 | 1507 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1508 | 1508 | user_perms, group_name) |
|
1509 | 1509 | if self.required_perms.issubset(user_perms): |
|
1510 | 1510 | return True |
|
1511 | 1511 | return False |
|
1512 | 1512 | |
|
1513 | 1513 | |
|
1514 | 1514 | class HasRepoGroupPermissionAnyDecorator(PermsDecorator): |
|
1515 | 1515 | """ |
|
1516 | 1516 | Checks for access permission for any of given predicates for specific |
|
1517 | 1517 | repository group. In order to fulfill the request any |
|
1518 | 1518 | of predicates must be met |
|
1519 | 1519 | """ |
|
1520 | 1520 | def _get_repo_group_name(self): |
|
1521 | 1521 | _request = self._get_request() |
|
1522 | 1522 | return get_repo_group_slug(_request) |
|
1523 | 1523 | |
|
1524 | 1524 | def check_permissions(self, user): |
|
1525 | 1525 | perms = user.permissions |
|
1526 | 1526 | group_name = self._get_repo_group_name() |
|
1527 | 1527 | |
|
1528 | 1528 | try: |
|
1529 | 1529 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1530 | 1530 | except KeyError: |
|
1531 | 1531 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1532 | 1532 | group_name) |
|
1533 | 1533 | return False |
|
1534 | 1534 | |
|
1535 | 1535 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1536 | 1536 | user_perms, group_name) |
|
1537 | 1537 | if self.required_perms.intersection(user_perms): |
|
1538 | 1538 | return True |
|
1539 | 1539 | return False |
|
1540 | 1540 | |
|
1541 | 1541 | |
|
1542 | 1542 | class HasUserGroupPermissionAllDecorator(PermsDecorator): |
|
1543 | 1543 | """ |
|
1544 | 1544 | Checks for access permission for all given predicates for specific |
|
1545 | 1545 | user group. All of them have to be met in order to fulfill the request |
|
1546 | 1546 | """ |
|
1547 | 1547 | def _get_user_group_name(self): |
|
1548 | 1548 | _request = self._get_request() |
|
1549 | 1549 | return get_user_group_slug(_request) |
|
1550 | 1550 | |
|
1551 | 1551 | def check_permissions(self, user): |
|
1552 | 1552 | perms = user.permissions |
|
1553 | 1553 | group_name = self._get_user_group_name() |
|
1554 | 1554 | try: |
|
1555 | 1555 | user_perms = set([perms['user_groups'][group_name]]) |
|
1556 | 1556 | except KeyError: |
|
1557 | 1557 | return False |
|
1558 | 1558 | |
|
1559 | 1559 | if self.required_perms.issubset(user_perms): |
|
1560 | 1560 | return True |
|
1561 | 1561 | return False |
|
1562 | 1562 | |
|
1563 | 1563 | |
|
1564 | 1564 | class HasUserGroupPermissionAnyDecorator(PermsDecorator): |
|
1565 | 1565 | """ |
|
1566 | 1566 | Checks for access permission for any of given predicates for specific |
|
1567 | 1567 | user group. In order to fulfill the request any of the predicates must be met |
|
1568 | 1568 | """ |
|
1569 | 1569 | def _get_user_group_name(self): |
|
1570 | 1570 | _request = self._get_request() |
|
1571 | 1571 | return get_user_group_slug(_request) |
|
1572 | 1572 | |
|
1573 | 1573 | def check_permissions(self, user): |
|
1574 | 1574 | perms = user.permissions |
|
1575 | 1575 | group_name = self._get_user_group_name() |
|
1576 | 1576 | try: |
|
1577 | 1577 | user_perms = set([perms['user_groups'][group_name]]) |
|
1578 | 1578 | except KeyError: |
|
1579 | 1579 | return False |
|
1580 | 1580 | |
|
1581 | 1581 | if self.required_perms.intersection(user_perms): |
|
1582 | 1582 | return True |
|
1583 | 1583 | return False |
|
1584 | 1584 | |
|
1585 | 1585 | |
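All of these decorator classes funnel through PermsDecorator.__wrapper; a sketch of how they are typically stacked on controller actions (the controller name and the chosen permission keys are illustrative only):

    from rhodecode.lib.auth import (
        HasPermissionAllDecorator, HasRepoPermissionAnyDecorator)

    class ExampleRepoSettingsController(object):

        # every listed global permission must be present
        @HasPermissionAllDecorator('hg.admin')
        def purge(self):
            return 'purged'

        # any one of the listed repository-level permissions is enough
        @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
        def update(self):
            return 'saved'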
|
1586 | 1586 | # CHECK FUNCTIONS |
|
1587 | 1587 | class PermsFunction(object): |
|
1588 | 1588 | """Base function for other check functions""" |
|
1589 | 1589 | |
|
1590 | 1590 | def __init__(self, *perms): |
|
1591 | 1591 | self.required_perms = set(perms) |
|
1592 | 1592 | self.repo_name = None |
|
1593 | 1593 | self.repo_group_name = None |
|
1594 | 1594 | self.user_group_name = None |
|
1595 | 1595 | |
|
1596 | 1596 | def __bool__(self): |
|
1597 | 1597 | frame = inspect.currentframe() |
|
1598 | 1598 | stack_trace = traceback.format_stack(frame) |
|
1599 | 1599 | log.error('Checking bool value on a class instance of perm ' |
|
1600 | 1600 | 'function is not allowed: %s' % ''.join(stack_trace)) |
|
1601 | 1601 | # rather than throwing errors, here we always return False so if by |
|
1602 | 1602 | # accident someone checks truth for just an instance it will always end |
|
1603 | 1603 | # up in returning False |
|
1604 | 1604 | return False |
|
1605 | 1605 | __nonzero__ = __bool__ |
|
1606 | 1606 | |
|
1607 | 1607 | def __call__(self, check_location='', user=None): |
|
1608 | 1608 | if not user: |
|
1609 | 1609 | log.debug('Using user attribute from global request') |
|
1610 | 1610 | # TODO: remove this someday, pass the user in as an attribute here |
|
1611 | 1611 | request = self._get_request() |
|
1612 | 1612 | user = request.user |
|
1613 | 1613 | |
|
1614 | 1614 | # init auth user if not already given |
|
1615 | 1615 | if not isinstance(user, AuthUser): |
|
1616 | 1616 | log.debug('Wrapping user %s into AuthUser', user) |
|
1617 | 1617 | user = AuthUser(user.user_id) |
|
1618 | 1618 | |
|
1619 | 1619 | cls_name = self.__class__.__name__ |
|
1620 | 1620 | check_scope = self._get_check_scope(cls_name) |
|
1621 | 1621 | check_location = check_location or 'unspecified location' |
|
1622 | 1622 | |
|
1623 | 1623 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, |
|
1624 | 1624 | self.required_perms, user, check_scope, check_location) |
|
1625 | 1625 | if not user: |
|
1626 | 1626 | log.warning('Empty user given for permission check') |
|
1627 | 1627 | return False |
|
1628 | 1628 | |
|
1629 | 1629 | if self.check_permissions(user): |
|
1630 | 1630 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
1631 | 1631 | check_scope, user, check_location) |
|
1632 | 1632 | return True |
|
1633 | 1633 | |
|
1634 | 1634 | else: |
|
1635 | 1635 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
1636 | 1636 | check_scope, user, check_location) |
|
1637 | 1637 | return False |
|
1638 | 1638 | |
|
1639 | 1639 | def _get_request(self): |
|
1640 | 1640 | from pyramid.threadlocal import get_current_request |
|
1641 | 1641 | pyramid_request = get_current_request() |
|
1642 | 1642 | if not pyramid_request: |
|
1643 | 1643 | # return global request of pylons in case the pyramid one isn't available |
|
1644 | 1644 | return request |
|
1645 | 1645 | return pyramid_request |
|
1646 | 1646 | |
|
1647 | 1647 | def _get_check_scope(self, cls_name): |
|
1648 | 1648 | return { |
|
1649 | 1649 | 'HasPermissionAll': 'GLOBAL', |
|
1650 | 1650 | 'HasPermissionAny': 'GLOBAL', |
|
1651 | 1651 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, |
|
1652 | 1652 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, |
|
1653 | 1653 | 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name, |
|
1654 | 1654 | 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name, |
|
1655 | 1655 | 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name, |
|
1656 | 1656 | 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name, |
|
1657 | 1657 | }.get(cls_name, '?:%s' % cls_name) |
|
1658 | 1658 | |
|
1659 | 1659 | def check_permissions(self, user): |
|
1660 | 1660 | """Dummy function for overriding""" |
|
1661 | 1661 | raise Exception('You have to write this function in child class') |
|
1662 | 1662 | |
|
1663 | 1663 | |
|
1664 | 1664 | class HasPermissionAll(PermsFunction): |
|
1665 | 1665 | def check_permissions(self, user): |
|
1666 | 1666 | perms = user.permissions_with_scope({}) |
|
1667 | 1667 | if self.required_perms.issubset(perms.get('global')): |
|
1668 | 1668 | return True |
|
1669 | 1669 | return False |
|
1670 | 1670 | |
|
1671 | 1671 | |
|
1672 | 1672 | class HasPermissionAny(PermsFunction): |
|
1673 | 1673 | def check_permissions(self, user): |
|
1674 | 1674 | perms = user.permissions_with_scope({}) |
|
1675 | 1675 | if self.required_perms.intersection(perms.get('global')): |
|
1676 | 1676 | return True |
|
1677 | 1677 | return False |
|
1678 | 1678 | |
|
1679 | 1679 | |
|
1680 | 1680 | class HasRepoPermissionAll(PermsFunction): |
|
1681 | 1681 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1682 | 1682 | self.repo_name = repo_name |
|
1683 | 1683 | return super(HasRepoPermissionAll, self).__call__(check_location, user) |
|
1684 | 1684 | |
|
1685 | 1685 | def _get_repo_name(self): |
|
1686 | 1686 | if not self.repo_name: |
|
1687 | 1687 | _request = self._get_request() |
|
1688 | 1688 | self.repo_name = get_repo_slug(_request) |
|
1689 | 1689 | return self.repo_name |
|
1690 | 1690 | |
|
1691 | 1691 | def check_permissions(self, user): |
|
1692 | 1692 | self.repo_name = self._get_repo_name() |
|
1693 | 1693 | perms = user.permissions |
|
1694 | 1694 | try: |
|
1695 | 1695 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1696 | 1696 | except KeyError: |
|
1697 | 1697 | return False |
|
1698 | 1698 | if self.required_perms.issubset(user_perms): |
|
1699 | 1699 | return True |
|
1700 | 1700 | return False |
|
1701 | 1701 | |
|
1702 | 1702 | |
|
1703 | 1703 | class HasRepoPermissionAny(PermsFunction): |
|
1704 | 1704 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1705 | 1705 | self.repo_name = repo_name |
|
1706 | 1706 | return super(HasRepoPermissionAny, self).__call__(check_location, user) |
|
1707 | 1707 | |
|
1708 | 1708 | def _get_repo_name(self): |
|
1709 | 1709 | if not self.repo_name: |
|
1710 | 1710 | self.repo_name = get_repo_slug(request) |
|
1711 | 1711 | return self.repo_name |
|
1712 | 1712 | |
|
1713 | 1713 | def check_permissions(self, user): |
|
1714 | 1714 | self.repo_name = self._get_repo_name() |
|
1715 | 1715 | perms = user.permissions |
|
1716 | 1716 | try: |
|
1717 | 1717 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1718 | 1718 | except KeyError: |
|
1719 | 1719 | return False |
|
1720 | 1720 | if self.required_perms.intersection(user_perms): |
|
1721 | 1721 | return True |
|
1722 | 1722 | return False |
|
1723 | 1723 | |
|
1724 | 1724 | |
|
1725 | 1725 | class HasRepoGroupPermissionAny(PermsFunction): |
|
1726 | 1726 | def __call__(self, group_name=None, check_location='', user=None): |
|
1727 | 1727 | self.repo_group_name = group_name |
|
1728 | 1728 | return super(HasRepoGroupPermissionAny, self).__call__( |
|
1729 | 1729 | check_location, user) |
|
1730 | 1730 | |
|
1731 | 1731 | def check_permissions(self, user): |
|
1732 | 1732 | perms = user.permissions |
|
1733 | 1733 | try: |
|
1734 | 1734 | user_perms = set( |
|
1735 | 1735 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1736 | 1736 | except KeyError: |
|
1737 | 1737 | return False |
|
1738 | 1738 | if self.required_perms.intersection(user_perms): |
|
1739 | 1739 | return True |
|
1740 | 1740 | return False |
|
1741 | 1741 | |
|
1742 | 1742 | |
|
1743 | 1743 | class HasRepoGroupPermissionAll(PermsFunction): |
|
1744 | 1744 | def __call__(self, group_name=None, check_location='', user=None): |
|
1745 | 1745 | self.repo_group_name = group_name |
|
1746 | 1746 | return super(HasRepoGroupPermissionAll, self).__call__( |
|
1747 | 1747 | check_location, user) |
|
1748 | 1748 | |
|
1749 | 1749 | def check_permissions(self, user): |
|
1750 | 1750 | perms = user.permissions |
|
1751 | 1751 | try: |
|
1752 | 1752 | user_perms = set( |
|
1753 | 1753 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1754 | 1754 | except KeyError: |
|
1755 | 1755 | return False |
|
1756 | 1756 | if self.required_perms.issubset(user_perms): |
|
1757 | 1757 | return True |
|
1758 | 1758 | return False |
|
1759 | 1759 | |
|
1760 | 1760 | |
|
1761 | 1761 | class HasUserGroupPermissionAny(PermsFunction): |
|
1762 | 1762 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1763 | 1763 | self.user_group_name = user_group_name |
|
1764 | 1764 | return super(HasUserGroupPermissionAny, self).__call__( |
|
1765 | 1765 | check_location, user) |
|
1766 | 1766 | |
|
1767 | 1767 | def check_permissions(self, user): |
|
1768 | 1768 | perms = user.permissions |
|
1769 | 1769 | try: |
|
1770 | 1770 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1771 | 1771 | except KeyError: |
|
1772 | 1772 | return False |
|
1773 | 1773 | if self.required_perms.intersection(user_perms): |
|
1774 | 1774 | return True |
|
1775 | 1775 | return False |
|
1776 | 1776 | |
|
1777 | 1777 | |
|
1778 | 1778 | class HasUserGroupPermissionAll(PermsFunction): |
|
1779 | 1779 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1780 | 1780 | self.user_group_name = user_group_name |
|
1781 | 1781 | return super(HasUserGroupPermissionAll, self).__call__( |
|
1782 | 1782 | check_location, user) |
|
1783 | 1783 | |
|
1784 | 1784 | def check_permissions(self, user): |
|
1785 | 1785 | perms = user.permissions |
|
1786 | 1786 | try: |
|
1787 | 1787 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1788 | 1788 | except KeyError: |
|
1789 | 1789 | return False |
|
1790 | 1790 | if self.required_perms.issubset(user_perms): |
|
1791 | 1791 | return True |
|
1792 | 1792 | return False |
|
1793 | 1793 | |
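Unlike the decorators, the PermsFunction classes are instantiated and then called; the sketch below (repository name and check_location string are placeholders) shows the intended pattern and why the __bool__ override above deliberately returns False:

    from rhodecode.lib.auth import HasRepoPermissionAny

    checker = HasRepoPermissionAny('repository.write', 'repository.admin')

    # correct: call the instance, optionally scoping it to a repository
    if checker(repo_name='some/repo', check_location='example-check'):
        pass  # the user may write to the repository

    # incorrect: a bare instance is never truthy; __bool__ only logs an
    # error and returns False, so the permission is never actually checked
    if checker:
        pass  # never reached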
|
1794 | 1794 | |
|
1795 | 1795 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
1796 | 1796 | class HasPermissionAnyMiddleware(object): |
|
1797 | 1797 | def __init__(self, *perms): |
|
1798 | 1798 | self.required_perms = set(perms) |
|
1799 | 1799 | |
|
1800 | 1800 | def __call__(self, user, repo_name): |
|
1801 | 1801 | # repo_name MUST be unicode, since we handle keys in permission |
|
1802 | 1802 | # dict by unicode |
|
1803 | 1803 | repo_name = safe_unicode(repo_name) |
|
1804 | 1804 | user = AuthUser(user.user_id) |
|
1805 | 1805 | log.debug( |
|
1806 | 1806 | 'Checking VCS protocol permissions %s for user:%s repo:`%s`', |
|
1807 | 1807 | self.required_perms, user, repo_name) |
|
1808 | 1808 | |
|
1809 | 1809 | if self.check_permissions(user, repo_name): |
|
1810 | 1810 | log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s', |
|
1811 | 1811 | repo_name, user, 'PermissionMiddleware') |
|
1812 | 1812 | return True |
|
1813 | 1813 | |
|
1814 | 1814 | else: |
|
1815 | 1815 | log.debug('Permission to repo:`%s` DENIED for user:%s @ %s', |
|
1816 | 1816 | repo_name, user, 'PermissionMiddleware') |
|
1817 | 1817 | return False |
|
1818 | 1818 | |
|
1819 | 1819 | def check_permissions(self, user, repo_name): |
|
1820 | 1820 | perms = user.permissions_with_scope({'repo_name': repo_name}) |
|
1821 | 1821 | |
|
1822 | 1822 | try: |
|
1823 | 1823 | user_perms = set([perms['repositories'][repo_name]]) |
|
1824 | 1824 | except Exception: |
|
1825 | 1825 | log.exception('Error while accessing user permissions') |
|
1826 | 1826 | return False |
|
1827 | 1827 | |
|
1828 | 1828 | if self.required_perms.intersection(user_perms): |
|
1829 | 1829 | return True |
|
1830 | 1830 | return False |
|
1831 | 1831 | |
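A sketch of how this middleware variant is typically consulted for a VCS request; the username and repository are placeholders, and the surrounding middleware (not shown) is what turns a False result into an HTTP 403:

    from rhodecode.lib.auth import HasPermissionAnyMiddleware
    from rhodecode.model.db import User

    vcs_checker = HasPermissionAnyMiddleware(
        'repository.read', 'repository.write', 'repository.admin')

    db_user = User.get_by_username('developer')    # placeholder account
    allowed = vcs_checker(db_user, u'some/repo')   # True only if any listed perm matches
    # the calling VCS middleware replies with 403 to the client when False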
|
1832 | 1832 | |
|
1833 | 1833 | # SPECIAL VERSION TO HANDLE API AUTH |
|
1834 | 1834 | class _BaseApiPerm(object): |
|
1835 | 1835 | def __init__(self, *perms): |
|
1836 | 1836 | self.required_perms = set(perms) |
|
1837 | 1837 | |
|
1838 | 1838 | def __call__(self, check_location=None, user=None, repo_name=None, |
|
1839 | 1839 | group_name=None, user_group_name=None): |
|
1840 | 1840 | cls_name = self.__class__.__name__ |
|
1841 | 1841 | check_scope = 'global:%s' % (self.required_perms,) |
|
1842 | 1842 | if repo_name: |
|
1843 | 1843 | check_scope += ', repo_name:%s' % (repo_name,) |
|
1844 | 1844 | |
|
1845 | 1845 | if group_name: |
|
1846 | 1846 | check_scope += ', repo_group_name:%s' % (group_name,) |
|
1847 | 1847 | |
|
1848 | 1848 | if user_group_name: |
|
1849 | 1849 | check_scope += ', user_group_name:%s' % (user_group_name,) |
|
1850 | 1850 | |
|
1851 | 1851 | log.debug( |
|
1852 | 1852 | 'checking cls:%s %s %s @ %s' |
|
1853 | 1853 | % (cls_name, self.required_perms, check_scope, check_location)) |
|
1854 | 1854 | if not user: |
|
1855 | 1855 | log.debug('Empty User passed into arguments') |
|
1856 | 1856 | return False |
|
1857 | 1857 | |
|
1858 | 1858 | # process user |
|
1859 | 1859 | if not isinstance(user, AuthUser): |
|
1860 | 1860 | user = AuthUser(user.user_id) |
|
1861 | 1861 | if not check_location: |
|
1862 | 1862 | check_location = 'unspecified' |
|
1863 | 1863 | if self.check_permissions(user.permissions, repo_name, group_name, |
|
1864 | 1864 | user_group_name): |
|
1865 | 1865 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
1866 | 1866 | check_scope, user, check_location) |
|
1867 | 1867 | return True |
|
1868 | 1868 | |
|
1869 | 1869 | else: |
|
1870 | 1870 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
1871 | 1871 | check_scope, user, check_location) |
|
1872 | 1872 | return False |
|
1873 | 1873 | |
|
1874 | 1874 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1875 | 1875 | user_group_name=None): |
|
1876 | 1876 | """ |
|
1877 | 1877 | Implement in child class; should return True if permissions are ok, |
|
1878 | 1878 | False otherwise |
|
1879 | 1879 | |
|
1880 | 1880 | :param perm_defs: dict with permission definitions |
|
1881 | 1881 | :param repo_name: repo name |
|
1882 | 1882 | """ |
|
1883 | 1883 | raise NotImplementedError() |
|
1884 | 1884 | |
|
1885 | 1885 | |
|
1886 | 1886 | class HasPermissionAllApi(_BaseApiPerm): |
|
1887 | 1887 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1888 | 1888 | user_group_name=None): |
|
1889 | 1889 | if self.required_perms.issubset(perm_defs.get('global')): |
|
1890 | 1890 | return True |
|
1891 | 1891 | return False |
|
1892 | 1892 | |
|
1893 | 1893 | |
|
1894 | 1894 | class HasPermissionAnyApi(_BaseApiPerm): |
|
1895 | 1895 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1896 | 1896 | user_group_name=None): |
|
1897 | 1897 | if self.required_perms.intersection(perm_defs.get('global')): |
|
1898 | 1898 | return True |
|
1899 | 1899 | return False |
|
1900 | 1900 | |
|
1901 | 1901 | |
|
1902 | 1902 | class HasRepoPermissionAllApi(_BaseApiPerm): |
|
1903 | 1903 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1904 | 1904 | user_group_name=None): |
|
1905 | 1905 | try: |
|
1906 | 1906 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1907 | 1907 | except KeyError: |
|
1908 | 1908 | log.warning(traceback.format_exc()) |
|
1909 | 1909 | return False |
|
1910 | 1910 | if self.required_perms.issubset(_user_perms): |
|
1911 | 1911 | return True |
|
1912 | 1912 | return False |
|
1913 | 1913 | |
|
1914 | 1914 | |
|
1915 | 1915 | class HasRepoPermissionAnyApi(_BaseApiPerm): |
|
1916 | 1916 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1917 | 1917 | user_group_name=None): |
|
1918 | 1918 | try: |
|
1919 | 1919 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1920 | 1920 | except KeyError: |
|
1921 | 1921 | log.warning(traceback.format_exc()) |
|
1922 | 1922 | return False |
|
1923 | 1923 | if self.required_perms.intersection(_user_perms): |
|
1924 | 1924 | return True |
|
1925 | 1925 | return False |
|
1926 | 1926 | |
|
1927 | 1927 | |
|
1928 | 1928 | class HasRepoGroupPermissionAnyApi(_BaseApiPerm): |
|
1929 | 1929 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1930 | 1930 | user_group_name=None): |
|
1931 | 1931 | try: |
|
1932 | 1932 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1933 | 1933 | except KeyError: |
|
1934 | 1934 | log.warning(traceback.format_exc()) |
|
1935 | 1935 | return False |
|
1936 | 1936 | if self.required_perms.intersection(_user_perms): |
|
1937 | 1937 | return True |
|
1938 | 1938 | return False |
|
1939 | 1939 | |
|
1940 | 1940 | |
|
1941 | 1941 | class HasRepoGroupPermissionAllApi(_BaseApiPerm): |
|
1942 | 1942 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1943 | 1943 | user_group_name=None): |
|
1944 | 1944 | try: |
|
1945 | 1945 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1946 | 1946 | except KeyError: |
|
1947 | 1947 | log.warning(traceback.format_exc()) |
|
1948 | 1948 | return False |
|
1949 | 1949 | if self.required_perms.issubset(_user_perms): |
|
1950 | 1950 | return True |
|
1951 | 1951 | return False |
|
1952 | 1952 | |
|
1953 | 1953 | |
|
1954 | 1954 | class HasUserGroupPermissionAnyApi(_BaseApiPerm): |
|
1955 | 1955 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1956 | 1956 | user_group_name=None): |
|
1957 | 1957 | try: |
|
1958 | 1958 | _user_perms = set([perm_defs['user_groups'][user_group_name]]) |
|
1959 | 1959 | except KeyError: |
|
1960 | 1960 | log.warning(traceback.format_exc()) |
|
1961 | 1961 | return False |
|
1962 | 1962 | if self.required_perms.intersection(_user_perms): |
|
1963 | 1963 | return True |
|
1964 | 1964 | return False |
|
1965 | 1965 | |
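The API flavours receive an already-resolved permissions dict instead of reading the request; a sketch of a typical call (the user, repository name, and check_location are placeholders):

    from rhodecode.lib.auth import HasRepoPermissionAnyApi
    from rhodecode.model.db import User

    has_repo_read = HasRepoPermissionAnyApi(
        'repository.read', 'repository.write', 'repository.admin')

    api_user = User.get_by_username('developer')   # placeholder account
    allowed = has_repo_read(user=api_user, repo_name='some/repo',
                            check_location='api.example_call')
    # API views typically raise a JSON-RPC style error when `allowed` is False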
|
1966 | 1966 | |
|
1967 | 1967 | def check_ip_access(source_ip, allowed_ips=None): |
|
1968 | 1968 | """ |
|
1969 | 1969 | Checks if source_ip falls within any of the allowed_ips networks. |
|
1970 | 1970 | |
|
1971 | 1971 | :param source_ip: |
|
1972 | 1972 | :param allowed_ips: list of allowed ips together with mask |
|
1973 | 1973 | """ |
|
1974 | 1974 | log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips)) |
|
1975 | 1975 | source_ip_address = ipaddress.ip_address(source_ip) |
|
1976 | 1976 | if isinstance(allowed_ips, (tuple, list, set)): |
|
1977 | 1977 | for ip in allowed_ips: |
|
1978 | 1978 | try: |
|
1979 | 1979 | network_address = ipaddress.ip_network(ip, strict=False) |
|
1980 | 1980 | if source_ip_address in network_address: |
|
1981 | 1981 | log.debug('IP %s is network %s' % |
|
1982 | 1982 | (source_ip_address, network_address)) |
|
1983 | 1983 | return True |
|
1984 | 1984 | # if we cannot determine the IP for any reason, don't crash; just |

1985 | 1985 | # skip it and log an error, we still want to respond with forbidden |

1986 | 1986 | # when a bad IP is sent |
|
1987 | 1987 | except Exception: |
|
1988 | 1988 | log.error(traceback.format_exc()) |
|
1989 | 1989 | continue |
|
1990 | 1990 | return False |
|
1991 | 1991 | |
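A few example calls showing how check_ip_access treats single hosts, CIDR networks, and an empty whitelist (the addresses are documentation-range examples, not configuration from this changeset):

    allowed = [u'192.0.2.0/24', u'198.51.100.7']

    check_ip_access(u'192.0.2.55', allowed)     # True  - inside 192.0.2.0/24
    check_ip_access(u'198.51.100.7', allowed)   # True  - a single host becomes a /32 network
    check_ip_access(u'203.0.113.9', allowed)    # False - no network matches
    check_ip_access(u'203.0.113.9', None)       # False - nothing whitelisted at all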
|
1992 | 1992 | |
|
1993 | 1993 | def get_cython_compat_decorator(wrapper, func): |
|
1994 | 1994 | """ |
|
1995 | 1995 | Creates a cython compatible decorator. The previously used |
|
1996 | 1996 | decorator.decorator() function seems to be incompatible with cython. |
|
1997 | 1997 | |
|
1998 | 1998 | :param wrapper: __wrapper method of the decorator class |
|
1999 | 1999 | :param func: decorated function |
|
2000 | 2000 | """ |
|
2001 | 2001 | @wraps(func) |
|
2002 | 2002 | def local_wrapper(*args, **kwds): |
|
2003 | 2003 | return wrapper(func, *args, **kwds) |
|
2004 | 2004 | local_wrapper.__wrapped__ = func |
|
2005 | 2005 | return local_wrapper |
|
2006 | 2006 | |
|
2007 | 2007 |
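To make the wrapper contract concrete, a small sketch of a decorator class built on get_cython_compat_decorator; LogCalls is a stand-in invented for illustration, not a class from this file:

    from rhodecode.lib.auth import get_cython_compat_decorator

    class LogCalls(object):
        # same __call__/__wrapper split as the decorator classes above
        def __call__(self, func):
            return get_cython_compat_decorator(self.__wrapper, func)

        def __wrapper(self, func, *fargs, **fkwargs):
            print('calling %s' % func.__name__)
            return func(*fargs, **fkwargs)

    @LogCalls()
    def add(a, b):
        return a + b

    add(2, 3)          # prints 'calling add' and returns 5
    add.__wrapped__    # the original undecorated function stays reachable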
@@ -1,592 +1,592 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | The base Controller API |
|
23 | 23 | Provides the BaseController class for subclassing. And usage in different |
|
24 | 24 | controllers |
|
25 | 25 | """ |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | import socket |
|
29 | 29 | |
|
30 | 30 | import ipaddress |
|
31 | 31 | import pyramid.threadlocal |
|
32 | 32 | |
|
33 | 33 | from paste.auth.basic import AuthBasicAuthenticator |
|
34 | 34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
35 | 35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
36 | 36 | from pylons import config, tmpl_context as c, request, session, url |
|
37 | 37 | from pylons.controllers import WSGIController |
|
38 | 38 | from pylons.controllers.util import redirect |
|
39 | 39 | from pylons.i18n import translation |
|
40 | 40 | # marcink: don't remove this import |
|
41 | 41 | from pylons.templating import render_mako as render # noqa |
|
42 | 42 | from pylons.i18n.translation import _ |
|
43 | 43 | from webob.exc import HTTPFound |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | import rhodecode |
|
47 | 47 | from rhodecode.authentication.base import VCS_TYPE |
|
48 | 48 | from rhodecode.lib import auth, utils2 |
|
49 | 49 | from rhodecode.lib import helpers as h |
|
50 | 50 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
51 | 51 | from rhodecode.lib.exceptions import UserCreationError |
|
52 | 52 | from rhodecode.lib.utils import ( |
|
53 | 53 | get_repo_slug, set_rhodecode_config, password_changed, |
|
54 | 54 | get_enabled_hook_classes) |
|
55 | 55 | from rhodecode.lib.utils2 import ( |
|
56 | 56 | str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist) |
|
57 | 57 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
58 | 58 | from rhodecode.model import meta |
|
59 | 59 | from rhodecode.model.db import Repository, User, ChangesetComment |
|
60 | 60 | from rhodecode.model.notification import NotificationModel |
|
61 | 61 | from rhodecode.model.scm import ScmModel |
|
62 | 62 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | log = logging.getLogger(__name__) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | def _filter_proxy(ip): |
|
69 | 69 | """ |
|
70 | 70 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
71 | 71 | ips. Those comma separated IPs are passed from various proxies in the |
|
72 | 72 | chain of request processing. The left-most being the original client. |
|
73 | 73 | We only care about the first IP which came from the org. client. |
|
74 | 74 | |
|
75 | 75 | :param ip: ip string from headers |
|
76 | 76 | """ |
|
77 | 77 | if ',' in ip: |
|
78 | 78 | _ips = ip.split(',') |
|
79 | 79 | _first_ip = _ips[0].strip() |
|
80 | 80 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
81 | 81 | return _first_ip |
|
82 | 82 | return ip |
|
83 | 83 | |
|
84 | 84 | |
|
85 | 85 | def _filter_port(ip): |
|
86 | 86 | """ |
|
87 | 87 | Removes a port from ip, there are 4 main cases to handle here. |
|
88 | 88 | - ipv4 eg. 127.0.0.1 |
|
89 | 89 | - ipv6 eg. ::1 |
|
90 | 90 | - ipv4+port eg. 127.0.0.1:8080 |
|
91 | 91 | - ipv6+port eg. [::1]:8080 |
|
92 | 92 | |
|
93 | 93 | :param ip: |
|
94 | 94 | """ |
|
95 | 95 | def is_ipv6(ip_addr): |
|
96 | 96 | if hasattr(socket, 'inet_pton'): |
|
97 | 97 | try: |
|
98 | 98 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
99 | 99 | except socket.error: |
|
100 | 100 | return False |
|
101 | 101 | else: |
|
102 | 102 | # fallback to ipaddress |
|
103 | 103 | try: |
|
104 | 104 | ipaddress.IPv6Address(ip_addr) |
|
105 | 105 | except Exception: |
|
106 | 106 | return False |
|
107 | 107 | return True |
|
108 | 108 | |
|
109 | 109 | if ':' not in ip: # must be ipv4 pure ip |
|
110 | 110 | return ip |
|
111 | 111 | |
|
112 | 112 | if '[' in ip and ']' in ip: # ipv6 with port |
|
113 | 113 | return ip.split(']')[0][1:].lower() |
|
114 | 114 | |
|
115 | 115 | # must be ipv6 or ipv4 with port |
|
116 | 116 | if is_ipv6(ip): |
|
117 | 117 | return ip |
|
118 | 118 | else: |
|
119 | 119 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
120 | 120 | return ip |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def get_ip_addr(environ): |
|
124 | 124 | proxy_key = 'HTTP_X_REAL_IP' |
|
125 | 125 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
126 | 126 | def_key = 'REMOTE_ADDR' |
|
127 | 127 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
128 | 128 | |
|
129 | 129 | ip = environ.get(proxy_key) |
|
130 | 130 | if ip: |
|
131 | 131 | return _filters(ip) |
|
132 | 132 | |
|
133 | 133 | ip = environ.get(proxy_key2) |
|
134 | 134 | if ip: |
|
135 | 135 | return _filters(ip) |
|
136 | 136 | |
|
137 | 137 | ip = environ.get(def_key, '0.0.0.0') |
|
138 | 138 | return _filters(ip) |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | def get_server_ip_addr(environ, log_errors=True): |
|
142 | 142 | hostname = environ.get('SERVER_NAME') |
|
143 | 143 | try: |
|
144 | 144 | return socket.gethostbyname(hostname) |
|
145 | 145 | except Exception as e: |
|
146 | 146 | if log_errors: |
|
147 | 147 | # in some cases this lookup is not possible, and we don't want to |
|
148 | 148 | # make it an exception in logs |
|
149 | 149 | log.exception('Could not retrieve server ip address: %s', e) |
|
150 | 150 | return hostname |
|
151 | 151 | |
|
152 | 152 | |
|
153 | 153 | def get_server_port(environ): |
|
154 | 154 | return environ.get('SERVER_PORT') |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | def get_access_path(environ): |
|
158 | 158 | path = environ.get('PATH_INFO') |
|
159 | 159 | org_req = environ.get('pylons.original_request') |
|
160 | 160 | if org_req: |
|
161 | 161 | path = org_req.environ.get('PATH_INFO') |
|
162 | 162 | return path |
|
163 | 163 | |
|
164 | 164 | |
|
165 | 165 | def get_user_agent(environ): |
|
166 | 166 | return environ.get('HTTP_USER_AGENT') |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | def vcs_operation_context( |
|
170 | 170 | environ, repo_name, username, action, scm, check_locking=True, |
|
171 | 171 | is_shadow_repo=False): |
|
172 | 172 | """ |
|
173 | 173 | Generate the context for a vcs operation, e.g. push or pull. |
|
174 | 174 | |
|
175 | 175 | This context is passed over the layers so that hooks triggered by the |
|
176 | 176 | vcs operation know details like the user, the user's IP address etc. |
|
177 | 177 | |
|
178 | 178 | :param check_locking: Allows to switch of the computation of the locking |
|
179 | 179 | data. This serves mainly the need of the simplevcs middleware to be |
|
180 | 180 | able to disable this for certain operations. |
|
181 | 181 | |
|
182 | 182 | """ |
|
183 | 183 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
184 | 184 | make_lock = None |
|
185 | 185 | locked_by = [None, None, None] |
|
186 | 186 | is_anonymous = username == User.DEFAULT_USER |
|
187 | 187 | if not is_anonymous and check_locking: |
|
188 | 188 | log.debug('Checking locking on repository "%s"', repo_name) |
|
189 | 189 | user = User.get_by_username(username) |
|
190 | 190 | repo = Repository.get_by_repo_name(repo_name) |
|
191 | 191 | make_lock, __, locked_by = repo.get_locking_state( |
|
192 | 192 | action, user.user_id) |
|
193 | 193 | |
|
194 | 194 | settings_model = VcsSettingsModel(repo=repo_name) |
|
195 | 195 | ui_settings = settings_model.get_ui_settings() |
|
196 | 196 | |
|
197 | 197 | extras = { |
|
198 | 198 | 'ip': get_ip_addr(environ), |
|
199 | 199 | 'username': username, |
|
200 | 200 | 'action': action, |
|
201 | 201 | 'repository': repo_name, |
|
202 | 202 | 'scm': scm, |
|
203 | 203 | 'config': rhodecode.CONFIG['__file__'], |
|
204 | 204 | 'make_lock': make_lock, |
|
205 | 205 | 'locked_by': locked_by, |
|
206 | 206 | 'server_url': utils2.get_server_url(environ), |
|
207 | 207 | 'user_agent': get_user_agent(environ), |
|
208 | 208 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
209 | 209 | 'is_shadow_repo': is_shadow_repo, |
|
210 | 210 | } |
|
211 | 211 | return extras |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class BasicAuth(AuthBasicAuthenticator): |
|
215 | 215 | |
|
216 | 216 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
217 | 217 | initial_call_detection=False, acl_repo_name=None): |
|
218 | 218 | self.realm = realm |
|
219 | 219 | self.initial_call = initial_call_detection |
|
220 | 220 | self.authfunc = authfunc |
|
221 | 221 | self.registry = registry |
|
222 | 222 | self.acl_repo_name = acl_repo_name |
|
223 | 223 | self._rc_auth_http_code = auth_http_code |
|
224 | 224 | |
|
225 | 225 | def _get_response_from_code(self, http_code): |
|
226 | 226 | try: |
|
227 | 227 | return get_exception(safe_int(http_code)) |
|
228 | 228 | except Exception: |
|
229 | 229 | log.exception('Failed to fetch response for code %s' % http_code) |
|
230 | 230 | return HTTPForbidden |
|
231 | 231 | |
|
232 | 232 | def build_authentication(self): |
|
233 | 233 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
234 | 234 | if self._rc_auth_http_code and not self.initial_call: |
|
235 | 235 | # return alternative HTTP code if alternative http return code |
|
236 | 236 | # is specified in RhodeCode config, but ONLY if it's not the |
|
237 | 237 | # FIRST call |
|
238 | 238 | custom_response_klass = self._get_response_from_code( |
|
239 | 239 | self._rc_auth_http_code) |
|
240 | 240 | return custom_response_klass(headers=head) |
|
241 | 241 | return HTTPUnauthorized(headers=head) |
|
242 | 242 | |
|
243 | 243 | def authenticate(self, environ): |
|
244 | 244 | authorization = AUTHORIZATION(environ) |
|
245 | 245 | if not authorization: |
|
246 | 246 | return self.build_authentication() |
|
247 | 247 | (authmeth, auth) = authorization.split(' ', 1) |
|
248 | 248 | if 'basic' != authmeth.lower(): |
|
249 | 249 | return self.build_authentication() |
|
250 | 250 | auth = auth.strip().decode('base64') |
|
251 | 251 | _parts = auth.split(':', 1) |
|
252 | 252 | if len(_parts) == 2: |
|
253 | 253 | username, password = _parts |
|
254 | 254 | if self.authfunc( |
|
255 | 255 | username, password, environ, VCS_TYPE, |
|
256 | 256 | registry=self.registry, acl_repo_name=self.acl_repo_name): |
|
257 | 257 | return username |
|
258 | 258 | if username and password: |
|
259 | 259 | # we mark that we actually executed authentication once, at |
|
260 | 260 | # that point we can use the alternative auth code |
|
261 | 261 | self.initial_call = False |
|
262 | 262 | |
|
263 | 263 | return self.build_authentication() |
|
264 | 264 | |
|
265 | 265 | __call__ = authenticate |
|
266 | 266 | |
|
267 | 267 | |
|
268 | 268 | def attach_context_attributes(context, request, user_id): |
|
269 | 269 | """ |
|
270 | 270 | Attach variables into template context called `c`, please note that |
|
271 | 271 | request could be pylons or pyramid request in here. |
|
272 | 272 | """ |
|
273 | 273 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
274 | 274 | |
|
275 | 275 | context.rhodecode_version = rhodecode.__version__ |
|
276 | 276 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
277 | 277 | # unique secret + version does not leak the version but keeps consistency |
|
278 | 278 | context.rhodecode_version_hash = md5( |
|
279 | 279 | config.get('beaker.session.secret', '') + |
|
280 | 280 | rhodecode.__version__)[:8] |
|
281 | 281 | |
|
282 | 282 | # Default language set for the incoming request |
|
283 | 283 | context.language = translation.get_lang()[0] |
|
284 | 284 | |
|
285 | 285 | # Visual options |
|
286 | 286 | context.visual = AttributeDict({}) |
|
287 | 287 | |
|
288 | 288 | # DB stored Visual Items |
|
289 | 289 | context.visual.show_public_icon = str2bool( |
|
290 | 290 | rc_config.get('rhodecode_show_public_icon')) |
|
291 | 291 | context.visual.show_private_icon = str2bool( |
|
292 | 292 | rc_config.get('rhodecode_show_private_icon')) |
|
293 | 293 | context.visual.stylify_metatags = str2bool( |
|
294 | 294 | rc_config.get('rhodecode_stylify_metatags')) |
|
295 | 295 | context.visual.dashboard_items = safe_int( |
|
296 | 296 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
297 | 297 | context.visual.admin_grid_items = safe_int( |
|
298 | 298 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
299 | 299 | context.visual.repository_fields = str2bool( |
|
300 | 300 | rc_config.get('rhodecode_repository_fields')) |
|
301 | 301 | context.visual.show_version = str2bool( |
|
302 | 302 | rc_config.get('rhodecode_show_version')) |
|
303 | 303 | context.visual.use_gravatar = str2bool( |
|
304 | 304 | rc_config.get('rhodecode_use_gravatar')) |
|
305 | 305 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
306 | 306 | context.visual.default_renderer = rc_config.get( |
|
307 | 307 | 'rhodecode_markup_renderer', 'rst') |
|
308 | 308 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
309 | 309 | context.visual.rhodecode_support_url = \ |
|
310 | 310 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
311 | 311 | |
|
312 | 312 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
313 | 313 | context.post_code = rc_config.get('rhodecode_post_code') |
|
314 | 314 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
315 | 315 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
316 | 316 | # if we have specified default_encoding in the request, it has more |
|
317 | 317 | # priority |
|
318 | 318 | if request.GET.get('default_encoding'): |
|
319 | 319 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
320 | 320 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
321 | 321 | |
|
322 | 322 | # INI stored |
|
323 | 323 | context.labs_active = str2bool( |
|
324 | 324 | config.get('labs_settings_active', 'false')) |
|
325 | 325 | context.visual.allow_repo_location_change = str2bool( |
|
326 | 326 | config.get('allow_repo_location_change', True)) |
|
327 | 327 | context.visual.allow_custom_hooks_settings = str2bool( |
|
328 | 328 | config.get('allow_custom_hooks_settings', True)) |
|
329 | 329 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
330 | 330 | |
|
331 | 331 | context.rhodecode_instanceid = config.get('instance_id') |
|
332 | 332 | |
|
333 | 333 | # AppEnlight |
|
334 | 334 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
335 | 335 | context.appenlight_api_public_key = config.get( |
|
336 | 336 | 'appenlight.api_public_key', '') |
|
337 | 337 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
338 | 338 | |
|
339 | 339 | # JS template context |
|
340 | 340 | context.template_context = { |
|
341 | 341 | 'repo_name': None, |
|
342 | 342 | 'repo_type': None, |
|
343 | 343 | 'repo_landing_commit': None, |
|
344 | 344 | 'rhodecode_user': { |
|
345 | 345 | 'username': None, |
|
346 | 346 | 'email': None, |
|
347 | 347 | 'notification_status': False |
|
348 | 348 | }, |
|
349 | 349 | 'visual': { |
|
350 | 350 | 'default_renderer': None |
|
351 | 351 | }, |
|
352 | 352 | 'commit_data': { |
|
353 | 353 | 'commit_id': None |
|
354 | 354 | }, |
|
355 | 355 | 'pull_request_data': {'pull_request_id': None}, |
|
356 | 356 | 'timeago': { |
|
357 | 357 | 'refresh_time': 120 * 1000, |
|
358 | 358 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
359 | 359 | }, |
|
360 | 360 | 'pylons_dispatch': { |
|
361 | 361 | # 'controller': request.environ['pylons.routes_dict']['controller'], |
|
362 | 362 | # 'action': request.environ['pylons.routes_dict']['action'], |
|
363 | 363 | }, |
|
364 | 364 | 'pyramid_dispatch': { |
|
365 | 365 | |
|
366 | 366 | }, |
|
367 | 367 | 'extra': {'plugins': {}} |
|
368 | 368 | } |
|
369 | 369 | # END CONFIG VARS |
|
370 | 370 | |
|
371 | 371 | # TODO: This doesn't work when called from the pylons compatibility tween. |
|
372 | 372 | # Fix this and remove it from base controller. |
|
373 | 373 | # context.repo_name = get_repo_slug(request) # can be empty |
|
374 | 374 | |
|
375 | 375 | diffmode = 'sideside' |
|
376 | 376 | if request.GET.get('diffmode'): |
|
377 | 377 | if request.GET['diffmode'] == 'unified': |
|
378 | 378 | diffmode = 'unified' |
|
379 | 379 | elif request.session.get('diffmode'): |
|
380 | 380 | diffmode = request.session['diffmode'] |
|
381 | 381 | |
|
382 | 382 | context.diffmode = diffmode |
|
383 | 383 | |
|
384 | 384 | if request.session.get('diffmode') != diffmode: |
|
385 | 385 | request.session['diffmode'] = diffmode |
|
386 | 386 | |
|
387 | 387 | context.csrf_token = auth.get_csrf_token() |
|
388 | 388 | context.backends = rhodecode.BACKENDS.keys() |
|
389 | 389 | context.backends.sort() |
|
390 | 390 | context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id) |
|
391 | 391 | context.pyramid_request = pyramid.threadlocal.get_current_request() |
|
392 | 392 | |
|
393 | 393 | |
|
394 | 394 | def get_auth_user(environ): |
|
395 | 395 | ip_addr = get_ip_addr(environ) |
|
396 | 396 | # make sure that we update permissions each time we call controller |
|
397 | 397 | _auth_token = (request.GET.get('auth_token', '') or |
|
398 | 398 | request.GET.get('api_key', '')) |
|
399 | 399 | |
|
400 | 400 | if _auth_token: |
|
401 | 401 | # when using API_KEY we assume user exists, and |
|
402 | 402 | # doesn't need auth based on cookies. |
|
403 | 403 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
404 | 404 | authenticated = False |
|
405 | 405 | else: |
|
406 | 406 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
407 | 407 | try: |
|
408 | 408 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
409 | 409 | ip_addr=ip_addr) |
|
410 | 410 | except UserCreationError as e: |
|
411 | 411 | h.flash(e, 'error') |
|
412 | 412 | # container auth or other auth functions that create users |
|
413 | 413 | # on the fly can throw this exception signaling that there's |
|
414 | 414 | # issue with user creation, explanation should be provided |
|
415 | 415 | # in Exception itself. We then create a simple blank |
|
416 | 416 | # AuthUser |
|
417 | 417 | auth_user = AuthUser(ip_addr=ip_addr) |
|
418 | 418 | |
|
419 | 419 | if password_changed(auth_user, session): |
|
420 | 420 | session.invalidate() |
|
421 | 421 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
422 | 422 | auth_user = AuthUser(ip_addr=ip_addr) |
|
423 | 423 | |
|
424 | 424 | authenticated = cookie_store.get('is_authenticated') |
|
425 | 425 | |
|
426 | 426 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
427 | 427 | # user is not authenticated and not empty |
|
428 | 428 | auth_user.set_authenticated(authenticated) |
|
429 | 429 | |
|
430 | 430 | return auth_user |
|
431 | 431 | |
|
432 | 432 | |
|
433 | 433 | class BaseController(WSGIController): |
|
434 | 434 | |
|
435 | 435 | def __before__(self): |
|
436 | 436 | """ |
|
437 | 437 | __before__ is called before controller methods and after __call__ |
|
438 | 438 | """ |
|
439 | 439 | # on each call propagate settings calls into global settings. |
|
440 | 440 | set_rhodecode_config(config) |
|
441 | 441 | attach_context_attributes(c, request, c.rhodecode_user.user_id) |
|
442 | 442 | |
|
443 | 443 | # TODO: Remove this when fixed in attach_context_attributes() |
|
444 | 444 | c.repo_name = get_repo_slug(request) # can be empty |
|
445 | 445 | |
|
446 | 446 | self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff')) |
|
447 | 447 | self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file')) |
|
448 | 448 | self.sa = meta.Session |
|
449 | 449 | self.scm_model = ScmModel(self.sa) |
|
450 | 450 | |
|
451 | 451 | # set user language |
|
452 | 452 | user_lang = getattr(c.pyramid_request, '_LOCALE_', None) |
|
453 | 453 | if user_lang: |
|
454 | 454 | translation.set_lang(user_lang) |
|
455 | 455 | log.debug('set language to %s for user %s', |
|
456 | 456 | user_lang, self._rhodecode_user) |
|
457 | 457 | |
|
458 | 458 | def _dispatch_redirect(self, with_url, environ, start_response): |
|
459 | 459 | resp = HTTPFound(with_url) |
|
460 | 460 | environ['SCRIPT_NAME'] = '' # handle prefix middleware |
|
461 | 461 | environ['PATH_INFO'] = with_url |
|
462 | 462 | return resp(environ, start_response) |
|
463 | 463 | |
|
464 | 464 | def __call__(self, environ, start_response): |
|
465 | 465 | """Invoke the Controller""" |
|
466 | 466 | # WSGIController.__call__ dispatches to the Controller method |
|
467 | 467 | # the request is routed to. This routing information is |
|
468 | 468 | # available in environ['pylons.routes_dict'] |
|
469 | 469 | from rhodecode.lib import helpers as h |
|
470 | 470 | |
|
471 | 471 | # Provide the Pylons context to Pyramid's debugtoolbar if it asks |
|
472 | 472 | if environ.get('debugtoolbar.wants_pylons_context', False): |
|
473 | 473 | environ['debugtoolbar.pylons_context'] = c._current_obj() |
|
474 | 474 | |
|
475 | 475 | _route_name = '.'.join([environ['pylons.routes_dict']['controller'], |
|
476 | 476 | environ['pylons.routes_dict']['action']]) |
|
477 | 477 | |
|
478 | 478 | self.rc_config = SettingsModel().get_all_settings(cache=True) |
|
479 | 479 | self.ip_addr = get_ip_addr(environ) |
|
480 | 480 | |
|
481 | 481 | # The rhodecode auth user is looked up and passed through the |
|
482 | 482 | # environ by the pylons compatibility tween in pyramid. |
|
483 | 483 | # So we can just grab it from there. |
|
484 | 484 | auth_user = environ['rc_auth_user'] |
|
485 | 485 | |
|
486 | 486 | # set globals for auth user |
|
487 | 487 | request.user = auth_user |
|
488 | 488 | c.rhodecode_user = self._rhodecode_user = auth_user |
|
489 | 489 | |
|
490 | 490 | log.info('IP: %s User: %s accessed %s [%s]' % ( |
|
491 | 491 | self.ip_addr, auth_user, safe_unicode(get_access_path(environ)), |
|
492 | 492 | _route_name) |
|
493 | 493 | ) |
|
494 | 494 | |
|
495 | 495 | user_obj = auth_user.get_instance() |
|
496 | 496 | if user_obj and user_obj.user_data.get('force_password_change'): |
|
497 | 497 | h.flash('You are required to change your password', 'warning', |
|
498 | 498 | ignore_duplicate=True) |
|
499 | 499 | return self._dispatch_redirect( |
|
500 | 500 | url('my_account_password'), environ, start_response) |
|
501 | 501 | |
|
502 | 502 | return WSGIController.__call__(self, environ, start_response) |
|
503 | 503 | |
|
504 | 504 | |
|
505 | 505 | class BaseRepoController(BaseController): |
|
506 | 506 | """ |
|
507 | 507 | Base class for controllers responsible for loading all needed data for |
|
508 | 508 | repository. Loaded items are: |
|
509 | 509 | |
|
510 | 510 | c.rhodecode_repo: instance of scm repository |
|
511 | 511 | c.rhodecode_db_repo: instance of db |
|
512 | 512 | c.repository_requirements_missing: shows that repository specific data |
|
513 | 513 | could not be displayed due to the missing requirements |
|
514 | 514 | c.repository_pull_requests: show number of open pull requests |
|
515 | 515 | """ |
|
516 | 516 | |
|
517 | 517 | def __before__(self): |
|
518 | 518 | super(BaseRepoController, self).__before__() |
|
519 | 519 | if c.repo_name: # extracted from routes |
|
520 | 520 | db_repo = Repository.get_by_repo_name(c.repo_name) |
|
521 | 521 | if not db_repo: |
|
522 | 522 | return |
|
523 | 523 | |
|
524 | 524 | log.debug( |
|
525 | 525 | 'Found repository in database %s with state `%s`', |
|
526 | 526 | safe_unicode(db_repo), safe_unicode(db_repo.repo_state)) |
|
527 | 527 | route = getattr(request.environ.get('routes.route'), 'name', '') |
|
528 | 528 | |
|
529 | 529 | # allow deleting repos that are somehow damaged in the filesystem |
|
530 | 530 | if route in ['delete_repo']: |
|
531 | 531 | return |
|
532 | 532 | |
|
533 | 533 | if db_repo.repo_state in [Repository.STATE_PENDING]: |
|
534 | 534 | if route in ['repo_creating_home']: |
|
535 | 535 | return |
|
536 | 536 | check_url = url('repo_creating_home', repo_name=c.repo_name) |
|
537 | 537 | return redirect(check_url) |
|
538 | 538 | |
|
539 | 539 | self.rhodecode_db_repo = db_repo |
|
540 | 540 | |
|
541 | 541 | missing_requirements = False |
|
542 | 542 | try: |
|
543 | 543 | self.rhodecode_repo = self.rhodecode_db_repo.scm_instance() |
|
544 | 544 | except RepositoryRequirementError as e: |
|
545 | 545 | missing_requirements = True |
|
546 | 546 | self._handle_missing_requirements(e) |
|
547 | 547 | |
|
548 | 548 | if self.rhodecode_repo is None and not missing_requirements: |
|
549 | 549 | log.error('%s this repository is present in database but it ' |
|
550 | 550 | 'cannot be created as an scm instance', c.repo_name) |
|
551 | 551 | |
|
552 | 552 | h.flash(_( |
|
553 | 553 | "The repository at %(repo_name)s cannot be located.") % |
|
554 | 554 | {'repo_name': c.repo_name}, |
|
555 | 555 | category='error', ignore_duplicate=True) |
|
556 | 556 | redirect(h.route_path('home')) |
|
557 | 557 | |
|
558 | 558 | # update last change according to VCS data |
|
559 | 559 | if not missing_requirements: |
|
560 | 560 | commit = db_repo.get_commit( |
|
561 | 561 | pre_load=["author", "date", "message", "parents"]) |
|
562 | 562 | db_repo.update_commit_cache(commit) |
|
563 | 563 | |
|
564 | 564 | # Prepare context |
|
565 | 565 | c.rhodecode_db_repo = db_repo |
|
566 | 566 | c.rhodecode_repo = self.rhodecode_repo |
|
567 | 567 | c.repository_requirements_missing = missing_requirements |
|
568 | 568 | |
|
569 | 569 | self._update_global_counters(self.scm_model, db_repo) |
|
570 | 570 | |
|
571 | 571 | def _update_global_counters(self, scm_model, db_repo): |
|
572 | 572 | """ |
|
573 | 573 | Base variables that are exposed to every page of repository |
|
574 | 574 | """ |
|
575 | 575 | c.repository_pull_requests = scm_model.get_pull_requests(db_repo) |
|
576 | 576 | |
|
577 | 577 | def _handle_missing_requirements(self, error): |
|
578 | 578 | self.rhodecode_repo = None |
|
579 | 579 | log.error( |
|
580 | 580 | 'Requirements are missing for repository %s: %s', |
|
581 | 581 | c.repo_name, error.message) |
|
582 | 582 | |
|
583 | 583 | summary_url = h.route_path('repo_summary', repo_name=c.repo_name) |
|
584 | 584 | statistics_url = url('edit_repo_statistics', repo_name=c.repo_name) |
|
585 | 585 | settings_update_url = url('repo', repo_name=c.repo_name) |
|
586 | 586 | path = request.path |
|
587 | 587 | should_redirect = ( |
|
588 | 588 | path not in (summary_url, settings_update_url) |
|
589 | 589 | and '/settings' not in path or path == statistics_url |
|
590 | 590 | ) |
|
591 | 591 | if should_redirect: |
|
592 | 592 | redirect(summary_url) |
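# Illustrative reading of the `should_redirect` expression above: Python's
# `and` binds tighter than `or`, so it is equivalent to
#
#   (path not in (summary_url, settings_update_url)
#    and '/settings' not in path) or path == statistics_url
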
@@ -1,2028 +1,2028 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helper functions |
|
23 | 23 | |
|
24 | 24 | Consists of functions to typically be used within templates, but also |
|
25 | 25 | available to Controllers. This module is available to both as 'h'. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import random |
|
29 | 29 | import hashlib |
|
30 | 30 | import StringIO |
|
31 | 31 | import urllib |
|
32 | 32 | import math |
|
33 | 33 | import logging |
|
34 | 34 | import re |
|
35 | 35 | import urlparse |
|
36 | 36 | import time |
|
37 | 37 | import string |
|
38 | 38 | import hashlib |
|
39 | 39 | from collections import OrderedDict |
|
40 | 40 | |
|
41 | 41 | import pygments |
|
42 | 42 | import itertools |
|
43 | 43 | import fnmatch |
|
44 | 44 | |
|
45 | 45 | from datetime import datetime |
|
46 | 46 | from functools import partial |
|
47 | 47 | from pygments.formatters.html import HtmlFormatter |
|
48 | 48 | from pygments import highlight as code_highlight |
|
49 | 49 | from pygments.lexers import ( |
|
50 | 50 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
51 | 51 | from pylons import url as pylons_url |
|
52 | 52 | from pylons.i18n.translation import _, ungettext |
|
53 | 53 | from pyramid.threadlocal import get_current_request |
|
54 | 54 | |
|
55 | 55 | from webhelpers.html import literal, HTML, escape |
|
56 | 56 | from webhelpers.html.tools import * |
|
57 | 57 | from webhelpers.html.builder import make_tag |
|
58 | 58 | from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ |
|
59 | 59 | end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \ |
|
60 | 60 | link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ |
|
61 | 61 | submit, text, password, textarea, title, ul, xml_declaration, radio |
|
62 | 62 | from webhelpers.html.tools import auto_link, button_to, highlight, \ |
|
63 | 63 | js_obfuscate, mail_to, strip_links, strip_tags, tag_re |
|
64 | 64 | from webhelpers.pylonslib import Flash as _Flash |
|
65 | 65 | from webhelpers.text import chop_at, collapse, convert_accented_entities, \ |
|
66 | 66 | convert_misc_entities, lchop, plural, rchop, remove_formatting, \ |
|
67 | 67 | replace_whitespace, urlify, truncate, wrap_paragraphs |
|
68 | 68 | from webhelpers.date import time_ago_in_words |
|
69 | 69 | from webhelpers.paginate import Page as _Page |
|
70 | 70 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ |
|
71 | 71 | convert_boolean_attrs, NotGiven, _make_safe_id_component |
|
72 | 72 | from webhelpers2.number import format_byte_size |
|
73 | 73 | |
|
74 | 74 | from rhodecode.lib.action_parser import action_parser |
|
75 | 75 | from rhodecode.lib.ext_json import json |
|
76 | 76 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
77 | 77 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ |
|
78 | 78 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ |
|
79 | 79 | AttributeDict, safe_int, md5, md5_safe |
|
80 | 80 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
81 | 81 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
82 | 82 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
83 | 83 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
84 | 84 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
85 | 85 | from rhodecode.model.db import Permission, User, Repository |
|
86 | 86 | from rhodecode.model.repo_group import RepoGroupModel |
|
87 | 87 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
88 | 88 | |
|
89 | 89 | log = logging.getLogger(__name__) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | DEFAULT_USER = User.DEFAULT_USER |
|
93 | 93 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
94 | 94 | |
|
95 | 95 | |
|
96 | 96 | def url(*args, **kw): |
|
97 | 97 | return pylons_url(*args, **kw) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def pylons_url_current(*args, **kw): |
|
101 | 101 | """ |
|
102 | 102 | This function overrides pylons.url.current() which returns the current |
|
103 | 103 | path so that it will also work from a pyramid only context. This |
|
104 | 104 | should be removed once port to pyramid is complete. |
|
105 | 105 | """ |
|
106 | 106 | if not args and not kw: |
|
107 | 107 | request = get_current_request() |
|
108 | 108 | return request.path |
|
109 | 109 | return pylons_url.current(*args, **kw) |
|
110 | 110 | |
|
111 | 111 | url.current = pylons_url_current |
|
112 | 112 | |
|
113 | 113 | |
|
114 | 114 | def url_replace(**qargs): |
|
115 | 115 | """ Returns the current request url while replacing query string args """ |
|
116 | 116 | |
|
117 | 117 | request = get_current_request() |
|
118 | 118 | new_args = request.GET.mixed() |
|
119 | 119 | new_args.update(qargs) |
|
120 | 120 | return url('', **new_args) |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def asset(path, ver=None, **kwargs): |
|
124 | 124 | """ |
|
125 | 125 | Helper to generate a static asset file path for rhodecode assets |
|
126 | 126 | |
|
127 | 127 | eg. h.asset('images/image.png', ver='3923') |
|
128 | 128 | |
|
129 | 129 | :param path: path of asset |
|
130 | 130 | :param ver: optional version query param to append as ?ver= |
|
131 | 131 | """ |
|
132 | 132 | request = get_current_request() |
|
133 | 133 | query = {} |
|
134 | 134 | query.update(kwargs) |
|
135 | 135 | if ver: |
|
136 | 136 | query = {'ver': ver} |
|
137 | 137 | return request.static_path( |
|
138 | 138 | 'rhodecode:public/{}'.format(path), _query=query) |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | default_html_escape_table = { |
|
142 | 142 | ord('&'): u'&', |
|
143 | 143 | ord('<'): u'<', |
|
144 | 144 | ord('>'): u'>', |
|
145 | 145 | ord('"'): u'"', |
|
146 | 146 | ord("'"): u''', |
|
147 | 147 | } |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
151 | 151 | """Produce entities within text.""" |
|
152 | 152 | return text.translate(html_escape_table) |
|
153 | 153 | |
|
154 | 154 | |
|
155 | 155 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
156 | 156 | """ |
|
157 | 157 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
158 | 158 | |
|
159 | 159 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
160 | 160 | """ |
|
161 | 161 | suffix_if_chopped = suffix_if_chopped or '' |
|
162 | 162 | pos = s.find(sub) |
|
163 | 163 | if pos == -1: |
|
164 | 164 | return s |
|
165 | 165 | |
|
166 | 166 | if inclusive: |
|
167 | 167 | pos += len(sub) |
|
168 | 168 | |
|
169 | 169 | chopped = s[:pos] |
|
170 | 170 | left = s[pos:].strip() |
|
171 | 171 | |
|
172 | 172 | if left and suffix_if_chopped: |
|
173 | 173 | chopped += suffix_if_chopped |
|
174 | 174 | |
|
175 | 175 | return chopped |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | def shorter(text, size=20): |
|
179 | 179 | postfix = '...' |
|
180 | 180 | if len(text) > size: |
|
181 | 181 | return text[:size - len(postfix)] + postfix |
|
182 | 182 | return text |
|
183 | 183 | |
|
184 | 184 | |
|
185 | 185 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
186 | 186 | """ |
|
187 | 187 | Reset button |
|
188 | 188 | """ |
|
189 | 189 | _set_input_attrs(attrs, type, name, value) |
|
190 | 190 | _set_id_attr(attrs, id, name) |
|
191 | 191 | convert_boolean_attrs(attrs, ["disabled"]) |
|
192 | 192 | return HTML.input(**attrs) |
|
193 | 193 | |
|
194 | 194 | reset = _reset |
|
195 | 195 | safeid = _make_safe_id_component |
|
196 | 196 | |
|
197 | 197 | |
|
198 | 198 | def branding(name, length=40): |
|
199 | 199 | return truncate(name, length, indicator="") |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | def FID(raw_id, path): |
|
203 | 203 | """ |
|
204 | 204 | Creates a unique ID for filenode based on it's hash of path and commit |
|
205 | 205 | it's safe to use in urls |
|
206 | 206 | |
|
207 | 207 | :param raw_id: |
|
208 | 208 | :param path: |
|
209 | 209 | """ |
|
210 | 210 | |
|
211 | 211 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class _GetError(object): |
|
215 | 215 | """Get error from form_errors, and represent it as span wrapped error |
|
216 | 216 | message |
|
217 | 217 | |
|
218 | 218 | :param field_name: field to fetch errors for |
|
219 | 219 | :param form_errors: form errors dict |
|
220 | 220 | """ |
|
221 | 221 | |
|
222 | 222 | def __call__(self, field_name, form_errors): |
|
223 | 223 | tmpl = """<span class="error_msg">%s</span>""" |
|
224 | 224 | if form_errors and field_name in form_errors: |
|
225 | 225 | return literal(tmpl % form_errors.get(field_name)) |
|
226 | 226 | |
|
227 | 227 | get_error = _GetError() |
|
228 | 228 | |
|
229 | 229 | |
|
230 | 230 | class _ToolTip(object): |
|
231 | 231 | |
|
232 | 232 | def __call__(self, tooltip_title, trim_at=50): |
|
233 | 233 | """ |
|
234 | 234 | Special function just to wrap our text into nice formatted |
|
235 | 235 | autowrapped text |
|
236 | 236 | |
|
237 | 237 | :param tooltip_title: |
|
238 | 238 | """ |
|
239 | 239 | tooltip_title = escape(tooltip_title) |
|
240 | 240 | tooltip_title = tooltip_title.replace('<', '<').replace('>', '>') |
|
241 | 241 | return tooltip_title |
|
242 | 242 | tooltip = _ToolTip() |
|
243 | 243 | |
|
244 | 244 | |
|
245 | 245 | def files_breadcrumbs(repo_name, commit_id, file_path): |
|
246 | 246 | if isinstance(file_path, str): |
|
247 | 247 | file_path = safe_unicode(file_path) |
|
248 | 248 | |
|
249 | 249 | # TODO: johbo: Is this always a url like path, or is this operating |
|
250 | 250 | # system dependent? |
|
251 | 251 | path_segments = file_path.split('/') |
|
252 | 252 | |
|
253 | 253 | repo_name_html = escape(repo_name) |
|
254 | 254 | if len(path_segments) == 1 and path_segments[0] == '': |
|
255 | 255 | url_segments = [repo_name_html] |
|
256 | 256 | else: |
|
257 | 257 | url_segments = [ |
|
258 | 258 | link_to( |
|
259 | 259 | repo_name_html, |
|
260 | 260 | url('files_home', |
|
261 | 261 | repo_name=repo_name, |
|
262 | 262 | revision=commit_id, |
|
263 | 263 | f_path=''), |
|
264 | 264 | class_='pjax-link')] |
|
265 | 265 | |
|
266 | 266 | last_cnt = len(path_segments) - 1 |
|
267 | 267 | for cnt, segment in enumerate(path_segments): |
|
268 | 268 | if not segment: |
|
269 | 269 | continue |
|
270 | 270 | segment_html = escape(segment) |
|
271 | 271 | |
|
272 | 272 | if cnt != last_cnt: |
|
273 | 273 | url_segments.append( |
|
274 | 274 | link_to( |
|
275 | 275 | segment_html, |
|
276 | 276 | url('files_home', |
|
277 | 277 | repo_name=repo_name, |
|
278 | 278 | revision=commit_id, |
|
279 | 279 | f_path='/'.join(path_segments[:cnt + 1])), |
|
280 | 280 | class_='pjax-link')) |
|
281 | 281 | else: |
|
282 | 282 | url_segments.append(segment_html) |
|
283 | 283 | |
|
284 | 284 | return literal('/'.join(url_segments)) |
|
285 | 285 | |
|
286 | 286 | |
|
287 | 287 | class CodeHtmlFormatter(HtmlFormatter): |
|
288 | 288 | """ |
|
289 | 289 | My code Html Formatter for source codes |
|
290 | 290 | """ |
|
291 | 291 | |
|
292 | 292 | def wrap(self, source, outfile): |
|
293 | 293 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
294 | 294 | |
|
295 | 295 | def _wrap_code(self, source): |
|
296 | 296 | for cnt, it in enumerate(source): |
|
297 | 297 | i, t = it |
|
298 | 298 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) |
|
299 | 299 | yield i, t |
|
300 | 300 | |
|
301 | 301 | def _wrap_tablelinenos(self, inner): |
|
302 | 302 | dummyoutfile = StringIO.StringIO() |
|
303 | 303 | lncount = 0 |
|
304 | 304 | for t, line in inner: |
|
305 | 305 | if t: |
|
306 | 306 | lncount += 1 |
|
307 | 307 | dummyoutfile.write(line) |
|
308 | 308 | |
|
309 | 309 | fl = self.linenostart |
|
310 | 310 | mw = len(str(lncount + fl - 1)) |
|
311 | 311 | sp = self.linenospecial |
|
312 | 312 | st = self.linenostep |
|
313 | 313 | la = self.lineanchors |
|
314 | 314 | aln = self.anchorlinenos |
|
315 | 315 | nocls = self.noclasses |
|
316 | 316 | if sp: |
|
317 | 317 | lines = [] |
|
318 | 318 | |
|
319 | 319 | for i in range(fl, fl + lncount): |
|
320 | 320 | if i % st == 0: |
|
321 | 321 | if i % sp == 0: |
|
322 | 322 | if aln: |
|
323 | 323 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
324 | 324 | (la, i, mw, i)) |
|
325 | 325 | else: |
|
326 | 326 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
327 | 327 | else: |
|
328 | 328 | if aln: |
|
329 | 329 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
330 | 330 | else: |
|
331 | 331 | lines.append('%*d' % (mw, i)) |
|
332 | 332 | else: |
|
333 | 333 | lines.append('') |
|
334 | 334 | ls = '\n'.join(lines) |
|
335 | 335 | else: |
|
336 | 336 | lines = [] |
|
337 | 337 | for i in range(fl, fl + lncount): |
|
338 | 338 | if i % st == 0: |
|
339 | 339 | if aln: |
|
340 | 340 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
341 | 341 | else: |
|
342 | 342 | lines.append('%*d' % (mw, i)) |
|
343 | 343 | else: |
|
344 | 344 | lines.append('') |
|
345 | 345 | ls = '\n'.join(lines) |
|
346 | 346 | |
|
347 | 347 | # in case you wonder about the seemingly redundant <div> here: since the |
|
348 | 348 | # content in the other cell also is wrapped in a div, some browsers in |
|
349 | 349 | # some configurations seem to mess up the formatting... |
|
350 | 350 | if nocls: |
|
351 | 351 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
352 | 352 | '<tr><td><div class="linenodiv" ' |
|
353 | 353 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
354 | 354 | '<pre style="line-height: 125%">' + |
|
355 | 355 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
356 | 356 | else: |
|
357 | 357 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
358 | 358 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
359 | 359 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
360 | 360 | yield 0, dummyoutfile.getvalue() |
|
361 | 361 | yield 0, '</td></tr></table>' |
|
362 | 362 | |
|
363 | 363 | |
|
364 | 364 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
365 | 365 | def __init__(self, **kw): |
|
366 | 366 | # only show these line numbers if set |
|
367 | 367 | self.only_lines = kw.pop('only_line_numbers', []) |
|
368 | 368 | self.query_terms = kw.pop('query_terms', []) |
|
369 | 369 | self.max_lines = kw.pop('max_lines', 5) |
|
370 | 370 | self.line_context = kw.pop('line_context', 3) |
|
371 | 371 | self.url = kw.pop('url', None) |
|
372 | 372 | |
|
373 | 373 | super(CodeHtmlFormatter, self).__init__(**kw) |
|
374 | 374 | |
|
375 | 375 | def _wrap_code(self, source): |
|
376 | 376 | for cnt, it in enumerate(source): |
|
377 | 377 | i, t = it |
|
378 | 378 | t = '<pre>%s</pre>' % t |
|
379 | 379 | yield i, t |
|
380 | 380 | |
|
381 | 381 | def _wrap_tablelinenos(self, inner): |
|
382 | 382 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
383 | 383 | |
|
384 | 384 | last_shown_line_number = 0 |
|
385 | 385 | current_line_number = 1 |
|
386 | 386 | |
|
387 | 387 | for t, line in inner: |
|
388 | 388 | if not t: |
|
389 | 389 | yield t, line |
|
390 | 390 | continue |
|
391 | 391 | |
|
392 | 392 | if current_line_number in self.only_lines: |
|
393 | 393 | if last_shown_line_number + 1 != current_line_number: |
|
394 | 394 | yield 0, '<tr>' |
|
395 | 395 | yield 0, '<td class="line">...</td>' |
|
396 | 396 | yield 0, '<td id="hlcode" class="code"></td>' |
|
397 | 397 | yield 0, '</tr>' |
|
398 | 398 | |
|
399 | 399 | yield 0, '<tr>' |
|
400 | 400 | if self.url: |
|
401 | 401 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
402 | 402 | self.url, current_line_number, current_line_number) |
|
403 | 403 | else: |
|
404 | 404 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
405 | 405 | current_line_number) |
|
406 | 406 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
407 | 407 | yield 0, '</tr>' |
|
408 | 408 | |
|
409 | 409 | last_shown_line_number = current_line_number |
|
410 | 410 | |
|
411 | 411 | current_line_number += 1 |
|
412 | 412 | |
|
413 | 413 | |
|
414 | 414 | yield 0, '</table>' |
|
415 | 415 | |
|
416 | 416 | |
|
417 | 417 | def extract_phrases(text_query): |
|
418 | 418 | """ |
|
419 | 419 | Extracts phrases from search term string making sure phrases |
|
420 | 420 | contained in double quotes are kept together - and discarding empty values |
|
421 | 421 | or fully whitespace values eg. |
|
422 | 422 | |
|
423 | 423 | 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more'] |
|
424 | 424 | |
|
425 | 425 | """ |
|
426 | 426 | |
|
427 | 427 | in_phrase = False |
|
428 | 428 | buf = '' |
|
429 | 429 | phrases = [] |
|
430 | 430 | for char in text_query: |
|
431 | 431 | if in_phrase: |
|
432 | 432 | if char == '"': # end phrase |
|
433 | 433 | phrases.append(buf) |
|
434 | 434 | buf = '' |
|
435 | 435 | in_phrase = False |
|
436 | 436 | continue |
|
437 | 437 | else: |
|
438 | 438 | buf += char |
|
439 | 439 | continue |
|
440 | 440 | else: |
|
441 | 441 | if char == '"': # start phrase |
|
442 | 442 | in_phrase = True |
|
443 | 443 | phrases.append(buf) |
|
444 | 444 | buf = '' |
|
445 | 445 | continue |
|
446 | 446 | elif char == ' ': |
|
447 | 447 | phrases.append(buf) |
|
448 | 448 | buf = '' |
|
449 | 449 | continue |
|
450 | 450 | else: |
|
451 | 451 | buf += char |
|
452 | 452 | |
|
453 | 453 | phrases.append(buf) |
|
454 | 454 | phrases = [phrase.strip() for phrase in phrases if phrase.strip()] |
|
455 | 455 | return phrases |
|
456 | 456 | |
|
457 | 457 | |
|
458 | 458 | def get_matching_offsets(text, phrases): |
|
459 | 459 | """ |
|
460 | 460 | Returns a list of string offsets in `text` that the list of `terms` match |
|
461 | 461 | |
|
462 | 462 | >>> get_matching_offsets('some text here', ['some', 'here']) |
|
463 | 463 | [(0, 4), (10, 14)] |
|
464 | 464 | |
|
465 | 465 | """ |
|
466 | 466 | offsets = [] |
|
467 | 467 | for phrase in phrases: |
|
468 | 468 | for match in re.finditer(phrase, text): |
|
469 | 469 | offsets.append((match.start(), match.end())) |
|
470 | 470 | |
|
471 | 471 | return offsets |
|
472 | 472 | |
|
473 | 473 | |
|
474 | 474 | def normalize_text_for_matching(x): |
|
475 | 475 | """ |
|
476 | 476 | Replaces all non alnum characters to spaces and lower cases the string, |
|
477 | 477 | useful for comparing two text strings without punctuation |
|
478 | 478 | """ |
|
479 | 479 | return re.sub(r'[^\w]', ' ', x.lower()) |
|
480 | 480 | |
|
481 | 481 | |
|
482 | 482 | def get_matching_line_offsets(lines, terms): |
|
483 | 483 | """ Return a set of `lines` indices (starting from 1) matching a |
|
484 | 484 | text search query, along with `context` lines above/below matching lines |
|
485 | 485 | |
|
486 | 486 | :param lines: list of strings representing lines |
|
487 | 487 | :param terms: search term string to match in lines eg. 'some text' |
|
488 | 488 | :param context: number of lines above/below a matching line to add to result |
|
489 | 489 | :param max_lines: cut off for lines of interest |
|
490 | 490 | eg. |
|
491 | 491 | |
|
492 | 492 | text = ''' |
|
493 | 493 | words words words |
|
494 | 494 | words words words |
|
495 | 495 | some text some |
|
496 | 496 | words words words |
|
497 | 497 | words words words |
|
498 | 498 | text here what |
|
499 | 499 | ''' |
|
500 | 500 | get_matching_line_offsets(text, 'text', context=1) |
|
501 | 501 | {3: [(5, 9)], 6: [(0, 4)]] |
|
502 | 502 | |
|
503 | 503 | """ |
|
504 | 504 | matching_lines = {} |
|
505 | 505 | phrases = [normalize_text_for_matching(phrase) |
|
506 | 506 | for phrase in extract_phrases(terms)] |
|
507 | 507 | |
|
508 | 508 | for line_index, line in enumerate(lines, start=1): |
|
509 | 509 | match_offsets = get_matching_offsets( |
|
510 | 510 | normalize_text_for_matching(line), phrases) |
|
511 | 511 | if match_offsets: |
|
512 | 512 | matching_lines[line_index] = match_offsets |
|
513 | 513 | |
|
514 | 514 | return matching_lines |
|
515 | 515 | |
|
516 | 516 | |
|
517 | 517 | def hsv_to_rgb(h, s, v): |
|
518 | 518 | """ Convert hsv color values to rgb """ |
|
519 | 519 | |
|
520 | 520 | if s == 0.0: |
|
521 | 521 | return v, v, v |
|
522 | 522 | i = int(h * 6.0) # XXX assume int() truncates! |
|
523 | 523 | f = (h * 6.0) - i |
|
524 | 524 | p = v * (1.0 - s) |
|
525 | 525 | q = v * (1.0 - s * f) |
|
526 | 526 | t = v * (1.0 - s * (1.0 - f)) |
|
527 | 527 | i = i % 6 |
|
528 | 528 | if i == 0: |
|
529 | 529 | return v, t, p |
|
530 | 530 | if i == 1: |
|
531 | 531 | return q, v, p |
|
532 | 532 | if i == 2: |
|
533 | 533 | return p, v, t |
|
534 | 534 | if i == 3: |
|
535 | 535 | return p, q, v |
|
536 | 536 | if i == 4: |
|
537 | 537 | return t, p, v |
|
538 | 538 | if i == 5: |
|
539 | 539 | return v, p, q |
|
540 | 540 | |
|
541 | 541 | |
|
542 | 542 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
543 | 543 | """ |
|
544 | 544 | Generator for getting n of evenly distributed colors using |
|
545 | 545 | hsv color and golden ratio. It always return same order of colors |
|
546 | 546 | |
|
547 | 547 | :param n: number of colors to generate |
|
548 | 548 | :param saturation: saturation of returned colors |
|
549 | 549 | :param lightness: lightness of returned colors |
|
550 | 550 | :returns: RGB tuple |
|
551 | 551 | """ |
|
552 | 552 | |
|
553 | 553 | golden_ratio = 0.618033988749895 |
|
554 | 554 | h = 0.22717784590367374 |
|
555 | 555 | |
|
556 | 556 | for _ in xrange(n): |
|
557 | 557 | h += golden_ratio |
|
558 | 558 | h %= 1 |
|
559 | 559 | HSV_tuple = [h, saturation, lightness] |
|
560 | 560 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
561 | 561 | yield map(lambda x: str(int(x * 256)), RGB_tuple) |
|
562 | 562 | |
|
563 | 563 | |
|
564 | 564 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
565 | 565 | """ |
|
566 | 566 | Returns a function which when called with an argument returns a unique |
|
567 | 567 | color for that argument, eg. |
|
568 | 568 | |
|
569 | 569 | :param n: number of colors to generate |
|
570 | 570 | :param saturation: saturation of returned colors |
|
571 | 571 | :param lightness: lightness of returned colors |
|
572 | 572 | :returns: css RGB string |
|
573 | 573 | |
|
574 | 574 | >>> color_hash = color_hasher() |
|
575 | 575 | >>> color_hash('hello') |
|
576 | 576 | 'rgb(34, 12, 59)' |
|
577 | 577 | >>> color_hash('hello') |
|
578 | 578 | 'rgb(34, 12, 59)' |
|
579 | 579 | >>> color_hash('other') |
|
580 | 580 | 'rgb(90, 224, 159)' |
|
581 | 581 | """ |
|
582 | 582 | |
|
583 | 583 | color_dict = {} |
|
584 | 584 | cgenerator = unique_color_generator( |
|
585 | 585 | saturation=saturation, lightness=lightness) |
|
586 | 586 | |
|
587 | 587 | def get_color_string(thing): |
|
588 | 588 | if thing in color_dict: |
|
589 | 589 | col = color_dict[thing] |
|
590 | 590 | else: |
|
591 | 591 | col = color_dict[thing] = cgenerator.next() |
|
592 | 592 | return "rgb(%s)" % (', '.join(col)) |
|
593 | 593 | |
|
594 | 594 | return get_color_string |
|
595 | 595 | |
|
596 | 596 | |
|
597 | 597 | def get_lexer_safe(mimetype=None, filepath=None): |
|
598 | 598 | """ |
|
599 | 599 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
600 | 600 | defaulting to plain text if none could be found |
|
601 | 601 | """ |
|
602 | 602 | lexer = None |
|
603 | 603 | try: |
|
604 | 604 | if mimetype: |
|
605 | 605 | lexer = get_lexer_for_mimetype(mimetype) |
|
606 | 606 | if not lexer: |
|
607 | 607 | lexer = get_lexer_for_filename(filepath) |
|
608 | 608 | except pygments.util.ClassNotFound: |
|
609 | 609 | pass |
|
610 | 610 | |
|
611 | 611 | if not lexer: |
|
612 | 612 | lexer = get_lexer_by_name('text') |
|
613 | 613 | |
|
614 | 614 | return lexer |
|
615 | 615 | |
|
616 | 616 | |
|
617 | 617 | def get_lexer_for_filenode(filenode): |
|
618 | 618 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
619 | 619 | return lexer |
|
620 | 620 | |
|
621 | 621 | |
|
622 | 622 | def pygmentize(filenode, **kwargs): |
|
623 | 623 | """ |
|
624 | 624 | pygmentize function using pygments |
|
625 | 625 | |
|
626 | 626 | :param filenode: |
|
627 | 627 | """ |
|
628 | 628 | lexer = get_lexer_for_filenode(filenode) |
|
629 | 629 | return literal(code_highlight(filenode.content, lexer, |
|
630 | 630 | CodeHtmlFormatter(**kwargs))) |
|
631 | 631 | |
|
632 | 632 | |
|
633 | 633 | def is_following_repo(repo_name, user_id): |
|
634 | 634 | from rhodecode.model.scm import ScmModel |
|
635 | 635 | return ScmModel().is_following_repo(repo_name, user_id) |
|
636 | 636 | |
|
637 | 637 | |
|
638 | 638 | class _Message(object): |
|
639 | 639 | """A message returned by ``Flash.pop_messages()``. |
|
640 | 640 | |
|
641 | 641 | Converting the message to a string returns the message text. Instances |
|
642 | 642 | also have the following attributes: |
|
643 | 643 | |
|
644 | 644 | * ``message``: the message text. |
|
645 | 645 | * ``category``: the category specified when the message was created. |
|
646 | 646 | """ |
|
647 | 647 | |
|
648 | 648 | def __init__(self, category, message): |
|
649 | 649 | self.category = category |
|
650 | 650 | self.message = message |
|
651 | 651 | |
|
652 | 652 | def __str__(self): |
|
653 | 653 | return self.message |
|
654 | 654 | |
|
655 | 655 | __unicode__ = __str__ |
|
656 | 656 | |
|
657 | 657 | def __html__(self): |
|
658 | 658 | return escape(safe_unicode(self.message)) |
|
659 | 659 | |
|
660 | 660 | |
|
661 | 661 | class Flash(_Flash): |
|
662 | 662 | |
|
663 | 663 | def pop_messages(self): |
|
664 | 664 | """Return all accumulated messages and delete them from the session. |
|
665 | 665 | |
|
666 | 666 | The return value is a list of ``Message`` objects. |
|
667 | 667 | """ |
|
668 | 668 | from pylons import session |
|
669 | 669 | |
|
670 | 670 | messages = [] |
|
671 | 671 | |
|
672 | 672 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
673 | 673 | # (category, message) |
|
674 | 674 | for cat, msg in session.pop(self.session_key, []): |
|
675 | 675 | messages.append(_Message(cat, msg)) |
|
676 | 676 | |
|
677 | 677 | # Pop the 'new' pyramid flash messages for each category as list |
|
678 | 678 | # of strings. |
|
679 | 679 | for cat in self.categories: |
|
680 | 680 | for msg in session.pop_flash(queue=cat): |
|
681 | 681 | messages.append(_Message(cat, msg)) |
|
682 | 682 | # Map messages from the default queue to the 'notice' category. |
|
683 | 683 | for msg in session.pop_flash(): |
|
684 | 684 | messages.append(_Message('notice', msg)) |
|
685 | 685 | |
|
686 | 686 | session.save() |
|
687 | 687 | return messages |
|
688 | 688 | |
|
689 | 689 | def json_alerts(self): |
|
690 | 690 | payloads = [] |
|
691 | 691 | messages = flash.pop_messages() |
|
692 | 692 | if messages: |
|
693 | 693 | for message in messages: |
|
694 | 694 | subdata = {} |
|
695 | 695 | if hasattr(message.message, 'rsplit'): |
|
696 | 696 | flash_data = message.message.rsplit('|DELIM|', 1) |
|
697 | 697 | org_message = flash_data[0] |
|
698 | 698 | if len(flash_data) > 1: |
|
699 | 699 | subdata = json.loads(flash_data[1]) |
|
700 | 700 | else: |
|
701 | 701 | org_message = message.message |
|
702 | 702 | payloads.append({ |
|
703 | 703 | 'message': { |
|
704 | 704 | 'message': u'{}'.format(org_message), |
|
705 | 705 | 'level': message.category, |
|
706 | 706 | 'force': True, |
|
707 | 707 | 'subdata': subdata |
|
708 | 708 | } |
|
709 | 709 | }) |
|
710 | 710 | return json.dumps(payloads) |
|
711 | 711 | |
|
712 | 712 | flash = Flash() |
|
713 | 713 | |
|
714 | 714 | #============================================================================== |
|
715 | 715 | # SCM FILTERS available via h. |
|
716 | 716 | #============================================================================== |
|
717 | 717 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
718 | 718 | from rhodecode.lib.utils2 import credentials_filter, age as _age |
|
719 | 719 | from rhodecode.model.db import User, ChangesetStatus |
|
720 | 720 | |
|
721 | 721 | age = _age |
|
722 | 722 | capitalize = lambda x: x.capitalize() |
|
723 | 723 | email = author_email |
|
724 | 724 | short_id = lambda x: x[:12] |
|
725 | 725 | hide_credentials = lambda x: ''.join(credentials_filter(x)) |
|
726 | 726 | |
|
727 | 727 | |
|
728 | 728 | def age_component(datetime_iso, value=None, time_is_local=False): |
|
729 | 729 | title = value or format_date(datetime_iso) |
|
730 | 730 | tzinfo = '+00:00' |
|
731 | 731 | |
|
732 | 732 | # detect if we have a timezone info, otherwise, add it |
|
733 | 733 | if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: |
|
734 | 734 | if time_is_local: |
|
735 | 735 | tzinfo = time.strftime("+%H:%M", |
|
736 | 736 | time.gmtime( |
|
737 | 737 | (datetime.now() - datetime.utcnow()).seconds + 1 |
|
738 | 738 | ) |
|
739 | 739 | ) |
|
740 | 740 | |
|
741 | 741 | return literal( |
|
742 | 742 | '<time class="timeago tooltip" ' |
|
743 | 743 | 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format( |
|
744 | 744 | datetime_iso, title, tzinfo)) |
|
745 | 745 | |
|
746 | 746 | |
|
747 | 747 | def _shorten_commit_id(commit_id): |
|
748 | 748 | from rhodecode import CONFIG |
|
749 | 749 | def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12)) |
|
750 | 750 | return commit_id[:def_len] |
|
751 | 751 | |
|
752 | 752 | |
|
753 | 753 | def show_id(commit): |
|
754 | 754 | """ |
|
755 | 755 | Configurable function that shows ID |
|
756 | 756 | by default it's r123:fffeeefffeee |
|
757 | 757 | |
|
758 | 758 | :param commit: commit instance |
|
759 | 759 | """ |
|
760 | 760 | from rhodecode import CONFIG |
|
761 | 761 | show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True)) |
|
762 | 762 | |
|
763 | 763 | raw_id = _shorten_commit_id(commit.raw_id) |
|
764 | 764 | if show_idx: |
|
765 | 765 | return 'r%s:%s' % (commit.idx, raw_id) |
|
766 | 766 | else: |
|
767 | 767 | return '%s' % (raw_id, ) |
|
768 | 768 | |
|
769 | 769 | |
|
770 | 770 | def format_date(date): |
|
771 | 771 | """ |
|
772 | 772 | use a standardized formatting for dates used in RhodeCode |
|
773 | 773 | |
|
774 | 774 | :param date: date/datetime object |
|
775 | 775 | :return: formatted date |
|
776 | 776 | """ |
|
777 | 777 | |
|
778 | 778 | if date: |
|
779 | 779 | _fmt = "%a, %d %b %Y %H:%M:%S" |
|
780 | 780 | return safe_unicode(date.strftime(_fmt)) |
|
781 | 781 | |
|
782 | 782 | return u"" |
|
783 | 783 | |
|
784 | 784 | |
|
785 | 785 | class _RepoChecker(object): |
|
786 | 786 | |
|
787 | 787 | def __init__(self, backend_alias): |
|
788 | 788 | self._backend_alias = backend_alias |
|
789 | 789 | |
|
790 | 790 | def __call__(self, repository): |
|
791 | 791 | if hasattr(repository, 'alias'): |
|
792 | 792 | _type = repository.alias |
|
793 | 793 | elif hasattr(repository, 'repo_type'): |
|
794 | 794 | _type = repository.repo_type |
|
795 | 795 | else: |
|
796 | 796 | _type = repository |
|
797 | 797 | return _type == self._backend_alias |
|
798 | 798 | |
|
799 | 799 | is_git = _RepoChecker('git') |
|
800 | 800 | is_hg = _RepoChecker('hg') |
|
801 | 801 | is_svn = _RepoChecker('svn') |
|
802 | 802 | |
|
803 | 803 | |
|
804 | 804 | def get_repo_type_by_name(repo_name): |
|
805 | 805 | repo = Repository.get_by_repo_name(repo_name) |
|
806 | 806 | return repo.repo_type |
|
807 | 807 | |
|
808 | 808 | |
|
809 | 809 | def is_svn_without_proxy(repository): |
|
810 | 810 | if is_svn(repository): |
|
811 | 811 | from rhodecode.model.settings import VcsSettingsModel |
|
812 | 812 | conf = VcsSettingsModel().get_ui_settings_as_config_obj() |
|
813 | 813 | return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
814 | 814 | return False |
|
815 | 815 | |
|
816 | 816 | |
|
817 | 817 | def discover_user(author): |
|
818 | 818 | """ |
|
819 | 819 | Tries to discover RhodeCode User based on the autho string. Author string |
|
820 | 820 | is typically `FirstName LastName <email@address.com>` |
|
821 | 821 | """ |
|
822 | 822 | |
|
823 | 823 | # if author is already an instance use it for extraction |
|
824 | 824 | if isinstance(author, User): |
|
825 | 825 | return author |
|
826 | 826 | |
|
827 | 827 | # Valid email in the attribute passed, see if they're in the system |
|
828 | 828 | _email = author_email(author) |
|
829 | 829 | if _email != '': |
|
830 | 830 | user = User.get_by_email(_email, case_insensitive=True, cache=True) |
|
831 | 831 | if user is not None: |
|
832 | 832 | return user |
|
833 | 833 | |
|
834 | 834 | # Maybe it's a username, we try to extract it and fetch by username ? |
|
835 | 835 | _author = author_name(author) |
|
836 | 836 | user = User.get_by_username(_author, case_insensitive=True, cache=True) |
|
837 | 837 | if user is not None: |
|
838 | 838 | return user |
|
839 | 839 | |
|
840 | 840 | return None |
|
841 | 841 | |
|
842 | 842 | |
|
843 | 843 | def email_or_none(author): |
|
844 | 844 | # extract email from the commit string |
|
845 | 845 | _email = author_email(author) |
|
846 | 846 | |
|
847 | 847 | # If we have an email, use it, otherwise |
|
848 | 848 | # see if it contains a username we can get an email from |
|
849 | 849 | if _email != '': |
|
850 | 850 | return _email |
|
851 | 851 | else: |
|
852 | 852 | user = User.get_by_username( |
|
853 | 853 | author_name(author), case_insensitive=True, cache=True) |
|
854 | 854 | |
|
855 | 855 | if user is not None: |
|
856 | 856 | return user.email |
|
857 | 857 | |
|
858 | 858 | # No valid email, not a valid user in the system, none! |
|
859 | 859 | return None |
|
860 | 860 | |
|
861 | 861 | |
|
862 | 862 | def link_to_user(author, length=0, **kwargs): |
|
863 | 863 | user = discover_user(author) |
|
864 | 864 | # user can be None, but if we have it already it means we can re-use it |
|
865 | 865 | # in the person() function, so we save 1 intensive-query |
|
866 | 866 | if user: |
|
867 | 867 | author = user |
|
868 | 868 | |
|
869 | 869 | display_person = person(author, 'username_or_name_or_email') |
|
870 | 870 | if length: |
|
871 | 871 | display_person = shorter(display_person, length) |
|
872 | 872 | |
|
873 | 873 | if user: |
|
874 | 874 | return link_to( |
|
875 | 875 | escape(display_person), |
|
876 | 876 | route_path('user_profile', username=user.username), |
|
877 | 877 | **kwargs) |
|
878 | 878 | else: |
|
879 | 879 | return escape(display_person) |
|
880 | 880 | |
|
881 | 881 | |
|
882 | 882 | def person(author, show_attr="username_and_name"): |
|
883 | 883 | user = discover_user(author) |
|
884 | 884 | if user: |
|
885 | 885 | return getattr(user, show_attr) |
|
886 | 886 | else: |
|
887 | 887 | _author = author_name(author) |
|
888 | 888 | _email = email(author) |
|
889 | 889 | return _author or _email |
|
890 | 890 | |
|
891 | 891 | |
|
892 | 892 | def author_string(email): |
|
893 | 893 | if email: |
|
894 | 894 | user = User.get_by_email(email, case_insensitive=True, cache=True) |
|
895 | 895 | if user: |
|
896 | 896 | if user.firstname or user.lastname: |
|
897 | 897 | return '%s %s <%s>' % ( |
|
898 | 898 | escape(user.firstname), escape(user.lastname), email) |
|
899 | 899 | else: |
|
900 | 900 | return email |
|
901 | 901 | else: |
|
902 | 902 | return email |
|
903 | 903 | else: |
|
904 | 904 | return None |
|
905 | 905 | |
|
906 | 906 | |
|
907 | 907 | def person_by_id(id_, show_attr="username_and_name"): |
|
908 | 908 | # attr to return from fetched user |
|
909 | 909 | person_getter = lambda usr: getattr(usr, show_attr) |
|
910 | 910 | |
|
911 | 911 | #maybe it's an ID ? |
|
912 | 912 | if str(id_).isdigit() or isinstance(id_, int): |
|
913 | 913 | id_ = int(id_) |
|
914 | 914 | user = User.get(id_) |
|
915 | 915 | if user is not None: |
|
916 | 916 | return person_getter(user) |
|
917 | 917 | return id_ |
|
918 | 918 | |
|
919 | 919 | |
|
920 | 920 | def gravatar_with_user(author, show_disabled=False): |
|
921 | 921 | from rhodecode.lib.utils import PartialRenderer |
|
922 | 922 | _render = PartialRenderer('base/base.mako') |
|
923 | 923 | return _render('gravatar_with_user', author, show_disabled=show_disabled) |
|
924 | 924 | |
|
925 | 925 | |
|
926 | 926 | def desc_stylize(value): |
|
927 | 927 | """ |
|
928 | 928 | converts tags from value into html equivalent |
|
929 | 929 | |
|
930 | 930 | :param value: |
|
931 | 931 | """ |
|
932 | 932 | if not value: |
|
933 | 933 | return '' |
|
934 | 934 | |
|
935 | 935 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
936 | 936 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
937 | 937 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
938 | 938 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
939 | 939 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
940 | 940 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
941 | 941 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
942 | 942 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
943 | 943 | value = re.sub(r'\[([a-z]+)\]', |
|
944 | 944 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
945 | 945 | |
|
946 | 946 | return value |
|
947 | 947 | |
|
948 | 948 | |
|
949 | 949 | def escaped_stylize(value): |
|
950 | 950 | """ |
|
951 | 951 | converts tags from value into html equivalent, but escaping its value first |
|
952 | 952 | """ |
|
953 | 953 | if not value: |
|
954 | 954 | return '' |
|
955 | 955 | |
|
956 | 956 | # Using default webhelper escape method, but has to force it as a |
|
957 | 957 | # plain unicode instead of a markup tag to be used in regex expressions |
|
958 | 958 | value = unicode(escape(safe_unicode(value))) |
|
959 | 959 | |
|
960 | 960 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
961 | 961 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
962 | 962 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
963 | 963 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
964 | 964 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
965 | 965 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
966 | 966 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
967 | 967 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
968 | 968 | value = re.sub(r'\[([a-z]+)\]', |
|
969 | 969 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
970 | 970 | |
|
971 | 971 | return value |
|
972 | 972 | |
|
973 | 973 | |
|
974 | 974 | def bool2icon(value): |
|
975 | 975 | """ |
|
976 | 976 | Returns boolean value of a given value, represented as html element with |
|
977 | 977 | classes that will represent icons |
|
978 | 978 | |
|
979 | 979 | :param value: given value to convert to html node |
|
980 | 980 | """ |
|
981 | 981 | |
|
982 | 982 | if value: # does bool conversion |
|
983 | 983 | return HTML.tag('i', class_="icon-true") |
|
984 | 984 | else: # not true as bool |
|
985 | 985 | return HTML.tag('i', class_="icon-false") |
|
986 | 986 | |
|
987 | 987 | |
|
988 | 988 | #============================================================================== |
|
989 | 989 | # PERMS |
|
990 | 990 | #============================================================================== |
|
991 | 991 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ |
|
992 | 992 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \ |
|
993 | 993 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \ |
|
994 | 994 | csrf_token_key |
|
995 | 995 | |
|
996 | 996 | |
|
997 | 997 | #============================================================================== |
|
998 | 998 | # GRAVATAR URL |
|
999 | 999 | #============================================================================== |
|
1000 | 1000 | class InitialsGravatar(object): |
|
1001 | 1001 | def __init__(self, email_address, first_name, last_name, size=30, |
|
1002 | 1002 | background=None, text_color='#fff'): |
|
1003 | 1003 | self.size = size |
|
1004 | 1004 | self.first_name = first_name |
|
1005 | 1005 | self.last_name = last_name |
|
1006 | 1006 | self.email_address = email_address |
|
1007 | 1007 | self.background = background or self.str2color(email_address) |
|
1008 | 1008 | self.text_color = text_color |
|
1009 | 1009 | |
|
1010 | 1010 | def get_color_bank(self): |
|
1011 | 1011 | """ |
|
1012 | 1012 | returns a predefined list of colors that gravatars can use. |
|
1013 | 1013 | Those are randomized distinct colors that guarantee readability and |
|
1014 | 1014 | uniqueness. |
|
1015 | 1015 | |
|
1016 | 1016 | generated with: http://phrogz.net/css/distinct-colors.html |
|
1017 | 1017 | """ |
|
1018 | 1018 | return [ |
|
1019 | 1019 | '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000', |
|
1020 | 1020 | '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320', |
|
1021 | 1021 | '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300', |
|
1022 | 1022 | '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140', |
|
1023 | 1023 | '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c', |
|
1024 | 1024 | '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020', |
|
1025 | 1025 | '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039', |
|
1026 | 1026 | '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f', |
|
1027 | 1027 | '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340', |
|
1028 | 1028 | '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98', |
|
1029 | 1029 | '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c', |
|
1030 | 1030 | '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200', |
|
1031 | 1031 | '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a', |
|
1032 | 1032 | '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959', |
|
1033 | 1033 | '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3', |
|
1034 | 1034 | '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626', |
|
1035 | 1035 | '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000', |
|
1036 | 1036 | '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362', |
|
1037 | 1037 | '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3', |
|
1038 | 1038 | '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a', |
|
1039 | 1039 | '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939', |
|
1040 | 1040 | '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39', |
|
1041 | 1041 | '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953', |
|
1042 | 1042 | '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9', |
|
1043 | 1043 | '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1', |
|
1044 | 1044 | '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900', |
|
1045 | 1045 | '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00', |
|
1046 | 1046 | '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3', |
|
1047 | 1047 | '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59', |
|
1048 | 1048 | '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079', |
|
1049 | 1049 | '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700', |
|
1050 | 1050 | '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d', |
|
1051 | 1051 | '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2', |
|
1052 | 1052 | '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff', |
|
1053 | 1053 | '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20', |
|
1054 | 1054 | '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626', |
|
1055 | 1055 | '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23', |
|
1056 | 1056 | '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff', |
|
1057 | 1057 | '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6', |
|
1058 | 1058 | '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a', |
|
1059 | 1059 | '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c', |
|
1060 | 1060 | '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600', |
|
1061 | 1061 | '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff', |
|
1062 | 1062 | '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539', |
|
1063 | 1063 | '#4f8c46', '#368dd9', '#5c0073' |
|
1064 | 1064 | ] |
|
1065 | 1065 | |
|
1066 | 1066 | def rgb_to_hex_color(self, rgb_tuple): |
|
1067 | 1067 | """ |
|
1068 | 1068 | Converts an rgb_tuple passed to an hex color. |
|
1069 | 1069 | |
|
1070 | 1070 | :param rgb_tuple: tuple with 3 ints represents rgb color space |
|
1071 | 1071 | """ |
|
1072 | 1072 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) |
|
1073 | 1073 | |
|
1074 | 1074 | def email_to_int_list(self, email_str): |
|
1075 | 1075 | """ |
|
1076 | 1076 | Get every byte of the hex digest value of email and turn it to integer. |
|
1077 | 1077 | It's going to be always between 0-255 |
|
1078 | 1078 | """ |
|
1079 | 1079 | digest = md5_safe(email_str.lower()) |
|
1080 | 1080 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1081 | 1081 | |
|
1082 | 1082 | def pick_color_bank_index(self, email_str, color_bank): |
|
1083 | 1083 | return self.email_to_int_list(email_str)[0] % len(color_bank) |
|
1084 | 1084 | |
|
1085 | 1085 | def str2color(self, email_str): |
|
1086 | 1086 | """ |
|
1087 | 1087 | Tries to map in a stable algorithm an email to color |
|
1088 | 1088 | |
|
1089 | 1089 | :param email_str: |
|
1090 | 1090 | """ |
|
1091 | 1091 | color_bank = self.get_color_bank() |
|
1092 | 1092 | # pick position (module it's length so we always find it in the |
|
1093 | 1093 | # bank even if it's smaller than 256 values |
|
1094 | 1094 | pos = self.pick_color_bank_index(email_str, color_bank) |
|
1095 | 1095 | return color_bank[pos] |
|
1096 | 1096 | |
|
1097 | 1097 | def normalize_email(self, email_address): |
|
1098 | 1098 | import unicodedata |
|
1099 | 1099 | # default host used to fill in the fake/missing email |
|
1100 | 1100 | default_host = u'localhost' |
|
1101 | 1101 | |
|
1102 | 1102 | if not email_address: |
|
1103 | 1103 | email_address = u'%s@%s' % (User.DEFAULT_USER, default_host) |
|
1104 | 1104 | |
|
1105 | 1105 | email_address = safe_unicode(email_address) |
|
1106 | 1106 | |
|
1107 | 1107 | if u'@' not in email_address: |
|
1108 | 1108 | email_address = u'%s@%s' % (email_address, default_host) |
|
1109 | 1109 | |
|
1110 | 1110 | if email_address.endswith(u'@'): |
|
1111 | 1111 | email_address = u'%s%s' % (email_address, default_host) |
|
1112 | 1112 | |
|
1113 | 1113 | email_address = unicodedata.normalize('NFKD', email_address)\ |
|
1114 | 1114 | .encode('ascii', 'ignore') |
|
1115 | 1115 | return email_address |
|
1116 | 1116 | |
|
1117 | 1117 | def get_initials(self): |
|
1118 | 1118 | """ |
|
1119 | 1119 | Returns 2 letter initials calculated based on the input. |
|
1120 | 1120 | The algorithm picks first given email address, and takes first letter |
|
1121 | 1121 | of part before @, and then the first letter of server name. In case |
|
1122 | 1122 | the part before @ is in a format of `somestring.somestring2` it replaces |
|
1123 | 1123 | the server letter with first letter of somestring2 |
|
1124 | 1124 | |
|
1125 | 1125 | In case function was initialized with both first and lastname, this |
|
1126 | 1126 | overrides the extraction from email by first letter of the first and |
|
1127 | 1127 | last name. We add special logic to that functionality, In case Full name |
|
1128 | 1128 | is compound, like Guido Von Rossum, we use last part of the last name |
|
1129 | 1129 | (Von Rossum) picking `R`. |
|
1130 | 1130 | |
|
1131 | 1131 | Function also normalizes the non-ascii characters to they ascii |
|
1132 | 1132 | representation, eg Ą => A |
|
1133 | 1133 | """ |
|
1134 | 1134 | import unicodedata |
|
1135 | 1135 | # replace non-ascii to ascii |
|
1136 | 1136 | first_name = unicodedata.normalize( |
|
1137 | 1137 | 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore') |
|
1138 | 1138 | last_name = unicodedata.normalize( |
|
1139 | 1139 | 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore') |
|
1140 | 1140 | |
|
1141 | 1141 | # do NFKD encoding, and also make sure email has proper format |
|
1142 | 1142 | email_address = self.normalize_email(self.email_address) |
|
1143 | 1143 | |
|
1144 | 1144 | # first push the email initials |
|
1145 | 1145 | prefix, server = email_address.split('@', 1) |
|
1146 | 1146 | |
|
1147 | 1147 | # check if prefix is maybe a 'firstname.lastname' syntax |
|
1148 | 1148 | _dot_split = prefix.rsplit('.', 1) |
|
1149 | 1149 | if len(_dot_split) == 2: |
|
1150 | 1150 | initials = [_dot_split[0][0], _dot_split[1][0]] |
|
1151 | 1151 | else: |
|
1152 | 1152 | initials = [prefix[0], server[0]] |
|
1153 | 1153 | |
|
1154 | 1154 | # then try to replace either firtname or lastname |
|
1155 | 1155 | fn_letter = (first_name or " ")[0].strip() |
|
1156 | 1156 | ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip() |
|
1157 | 1157 | |
|
1158 | 1158 | if fn_letter: |
|
1159 | 1159 | initials[0] = fn_letter |
|
1160 | 1160 | |
|
1161 | 1161 | if ln_letter: |
|
1162 | 1162 | initials[1] = ln_letter |
|
1163 | 1163 | |
|
1164 | 1164 | return ''.join(initials).upper() |
|
1165 | 1165 | |
|
1166 | 1166 | def get_img_data_by_type(self, font_family, img_type): |
|
1167 | 1167 | default_user = """ |
|
1168 | 1168 | <svg xmlns="http://www.w3.org/2000/svg" |
|
1169 | 1169 | version="1.1" x="0px" y="0px" width="{size}" height="{size}" |
|
1170 | 1170 | viewBox="-15 -10 439.165 429.164" |
|
1171 | 1171 | |
|
1172 | 1172 | xml:space="preserve" |
|
1173 | 1173 | style="background:{background};" > |
|
1174 | 1174 | |
|
1175 | 1175 | <path d="M204.583,216.671c50.664,0,91.74-48.075, |
|
1176 | 1176 | 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377 |
|
1177 | 1177 | c-50.668,0-91.74,25.14-91.74,107.377C112.844, |
|
1178 | 1178 | 168.596,153.916,216.671, |
|
1179 | 1179 | 204.583,216.671z" fill="{text_color}"/> |
|
1180 | 1180 | <path d="M407.164,374.717L360.88, |
|
1181 | 1181 | 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392 |
|
1182 | 1182 | c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316, |
|
1183 | 1183 | 15.366-44.203,23.488-69.076,23.488c-24.877, |
|
1184 | 1184 | 0-48.762-8.122-69.078-23.488 |
|
1185 | 1185 | c-1.428-1.078-3.346-1.238-4.93-0.415L58.75, |
|
1186 | 1186 | 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717 |
|
1187 | 1187 | c-3.191,7.188-2.537,15.412,1.75,22.005c4.285, |
|
1188 | 1188 | 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936, |
|
1189 | 1189 | 19.402-10.527 C409.699,390.129, |
|
1190 | 1190 | 410.355,381.902,407.164,374.717z" fill="{text_color}"/> |
|
1191 | 1191 | </svg>""".format( |
|
1192 | 1192 | size=self.size, |
|
1193 | 1193 | background='#979797', # @grey4 |
|
1194 | 1194 | text_color=self.text_color, |
|
1195 | 1195 | font_family=font_family) |
|
1196 | 1196 | |
|
1197 | 1197 | return { |
|
1198 | 1198 | "default_user": default_user |
|
1199 | 1199 | }[img_type] |
|
1200 | 1200 | |
|
1201 | 1201 | def get_img_data(self, svg_type=None): |
|
1202 | 1202 | """ |
|
1203 | 1203 | generates the svg metadata for image |
|
1204 | 1204 | """ |
|
1205 | 1205 | |
|
1206 | 1206 | font_family = ','.join([ |
|
1207 | 1207 | 'proximanovaregular', |
|
1208 | 1208 | 'Proxima Nova Regular', |
|
1209 | 1209 | 'Proxima Nova', |
|
1210 | 1210 | 'Arial', |
|
1211 | 1211 | 'Lucida Grande', |
|
1212 | 1212 | 'sans-serif' |
|
1213 | 1213 | ]) |
|
1214 | 1214 | if svg_type: |
|
1215 | 1215 | return self.get_img_data_by_type(font_family, svg_type) |
|
1216 | 1216 | |
|
1217 | 1217 | initials = self.get_initials() |
|
1218 | 1218 | img_data = """ |
|
1219 | 1219 | <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" |
|
1220 | 1220 | width="{size}" height="{size}" |
|
1221 | 1221 | style="width: 100%; height: 100%; background-color: {background}" |
|
1222 | 1222 | viewBox="0 0 {size} {size}"> |
|
1223 | 1223 | <text text-anchor="middle" y="50%" x="50%" dy="0.35em" |
|
1224 | 1224 | pointer-events="auto" fill="{text_color}" |
|
1225 | 1225 | font-family="{font_family}" |
|
1226 | 1226 | style="font-weight: 400; font-size: {f_size}px;">{text} |
|
1227 | 1227 | </text> |
|
1228 | 1228 | </svg>""".format( |
|
1229 | 1229 | size=self.size, |
|
1230 | 1230 | f_size=self.size/1.85, # scale the text inside the box nicely |
|
1231 | 1231 | background=self.background, |
|
1232 | 1232 | text_color=self.text_color, |
|
1233 | 1233 | text=initials.upper(), |
|
1234 | 1234 | font_family=font_family) |
|
1235 | 1235 | |
|
1236 | 1236 | return img_data |
|
1237 | 1237 | |
|
1238 | 1238 | def generate_svg(self, svg_type=None): |
|
1239 | 1239 | img_data = self.get_img_data(svg_type) |
|
1240 | 1240 | return "data:image/svg+xml;base64,%s" % img_data.encode('base64') |
|
1241 | 1241 | |
|
1242 | 1242 | |
|
1243 | 1243 | def initials_gravatar(email_address, first_name, last_name, size=30): |
|
1244 | 1244 | svg_type = None |
|
1245 | 1245 | if email_address == User.DEFAULT_USER_EMAIL: |
|
1246 | 1246 | svg_type = 'default_user' |
|
1247 | 1247 | klass = InitialsGravatar(email_address, first_name, last_name, size) |
|
1248 | 1248 | return klass.generate_svg(svg_type=svg_type) |
|
1249 | 1249 | |
|
1250 | 1250 | |
|
1251 | 1251 | def gravatar_url(email_address, size=30): |
|
1252 | 1252 | # doh, we need to re-import those to mock it later |
|
1253 | 1253 | from pylons import tmpl_context as c |
|
1254 | 1254 | |
|
1255 | 1255 | _use_gravatar = c.visual.use_gravatar |
|
1256 | 1256 | _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL |
|
1257 | 1257 | |
|
1258 | 1258 | email_address = email_address or User.DEFAULT_USER_EMAIL |
|
1259 | 1259 | if isinstance(email_address, unicode): |
|
1260 | 1260 | # hashlib crashes on unicode items |
|
1261 | 1261 | email_address = safe_str(email_address) |
|
1262 | 1262 | |
|
1263 | 1263 | # empty email or default user |
|
1264 | 1264 | if not email_address or email_address == User.DEFAULT_USER_EMAIL: |
|
1265 | 1265 | return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size) |
|
1266 | 1266 | |
|
1267 | 1267 | if _use_gravatar: |
|
1268 | 1268 | # TODO: Disuse pyramid thread locals. Think about another solution to |
|
1269 | 1269 | # get the host and schema here. |
|
1270 | 1270 | request = get_current_request() |
|
1271 | 1271 | tmpl = safe_str(_gravatar_url) |
|
1272 | 1272 | tmpl = tmpl.replace('{email}', email_address)\ |
|
1273 | 1273 | .replace('{md5email}', md5_safe(email_address.lower())) \ |
|
1274 | 1274 | .replace('{netloc}', request.host)\ |
|
1275 | 1275 | .replace('{scheme}', request.scheme)\ |
|
1276 | 1276 | .replace('{size}', safe_str(size)) |
|
1277 | 1277 | return tmpl |
|
1278 | 1278 | else: |
|
1279 | 1279 | return initials_gravatar(email_address, '', '', size=size) |
|
1280 | 1280 | |
|
1281 | 1281 | |
|
1282 | 1282 | class Page(_Page): |
|
1283 | 1283 | """ |
|
1284 | 1284 | Custom pager to match rendering style with paginator |
|
1285 | 1285 | """ |
|
1286 | 1286 | |
|
1287 | 1287 | def _get_pos(self, cur_page, max_page, items): |
|
1288 | 1288 | edge = (items / 2) + 1 |
|
1289 | 1289 | if (cur_page <= edge): |
|
1290 | 1290 | radius = max(items / 2, items - cur_page) |
|
1291 | 1291 | elif (max_page - cur_page) < edge: |
|
1292 | 1292 | radius = (items - 1) - (max_page - cur_page) |
|
1293 | 1293 | else: |
|
1294 | 1294 | radius = items / 2 |
|
1295 | 1295 | |
|
1296 | 1296 | left = max(1, (cur_page - (radius))) |
|
1297 | 1297 | right = min(max_page, cur_page + (radius)) |
|
1298 | 1298 | return left, cur_page, right |
|
1299 | 1299 | |
|
1300 | 1300 | def _range(self, regexp_match): |
|
1301 | 1301 | """ |
|
1302 | 1302 | Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8'). |
|
1303 | 1303 | |
|
1304 | 1304 | Arguments: |
|
1305 | 1305 | |
|
1306 | 1306 | regexp_match |
|
1307 | 1307 | A "re" (regular expressions) match object containing the |
|
1308 | 1308 | radius of linked pages around the current page in |
|
1309 | 1309 | regexp_match.group(1) as a string |
|
1310 | 1310 | |
|
1311 | 1311 | This function is supposed to be called as a callable in |
|
1312 | 1312 | re.sub. |
|
1313 | 1313 | |
|
1314 | 1314 | """ |
|
1315 | 1315 | radius = int(regexp_match.group(1)) |
|
1316 | 1316 | |
|
1317 | 1317 | # Compute the first and last page number within the radius |
|
1318 | 1318 | # e.g. '1 .. 5 6 [7] 8 9 .. 12' |
|
1319 | 1319 | # -> leftmost_page = 5 |
|
1320 | 1320 | # -> rightmost_page = 9 |
|
1321 | 1321 | leftmost_page, _cur, rightmost_page = self._get_pos(self.page, |
|
1322 | 1322 | self.last_page, |
|
1323 | 1323 | (radius * 2) + 1) |
|
1324 | 1324 | nav_items = [] |
|
1325 | 1325 | |
|
1326 | 1326 | # Create a link to the first page (unless we are on the first page |
|
1327 | 1327 | # or there would be no need to insert '..' spacers) |
|
1328 | 1328 | if self.page != self.first_page and self.first_page < leftmost_page: |
|
1329 | 1329 | nav_items.append(self._pagerlink(self.first_page, self.first_page)) |
|
1330 | 1330 | |
|
1331 | 1331 | # Insert dots if there are pages between the first page |
|
1332 | 1332 | # and the currently displayed page range |
|
1333 | 1333 | if leftmost_page - self.first_page > 1: |
|
1334 | 1334 | # Wrap in a SPAN tag if dotdot_attr is set |
|
1335 | 1335 | text = '..' |
|
1336 | 1336 | if self.dotdot_attr: |
|
1337 | 1337 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1338 | 1338 | nav_items.append(text) |
|
1339 | 1339 | |
|
1340 | 1340 | for thispage in xrange(leftmost_page, rightmost_page + 1): |
|
1341 | 1341 | # Highlight the current page number and do not use a link |
|
1342 | 1342 | if thispage == self.page: |
|
1343 | 1343 | text = '%s' % (thispage,) |
|
1344 | 1344 | # Wrap in a SPAN tag if curpage_attr is set |
|
1345 | 1345 | if self.curpage_attr: |
|
1346 | 1346 | text = HTML.span(c=text, **self.curpage_attr) |
|
1347 | 1347 | nav_items.append(text) |
|
1348 | 1348 | # Otherwise create just a link to that page |
|
1349 | 1349 | else: |
|
1350 | 1350 | text = '%s' % (thispage,) |
|
1351 | 1351 | nav_items.append(self._pagerlink(thispage, text)) |
|
1352 | 1352 | |
|
1353 | 1353 | # Insert dots if there are pages between the displayed |
|
1354 | 1354 | # page numbers and the end of the page range |
|
1355 | 1355 | if self.last_page - rightmost_page > 1: |
|
1356 | 1356 | text = '..' |
|
1357 | 1357 | # Wrap in a SPAN tag if dotdot_attr is set |
|
1358 | 1358 | if self.dotdot_attr: |
|
1359 | 1359 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1360 | 1360 | nav_items.append(text) |
|
1361 | 1361 | |
|
1362 | 1362 | # Create a link to the very last page (unless we are on the last |
|
1363 | 1363 | # page or there would be no need to insert '..' spacers) |
|
1364 | 1364 | if self.page != self.last_page and rightmost_page < self.last_page: |
|
1365 | 1365 | nav_items.append(self._pagerlink(self.last_page, self.last_page)) |
|
1366 | 1366 | |
|
1367 | 1367 | ## prerender links |
|
1368 | 1368 | #_page_link = url.current() |
|
1369 | 1369 | #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1370 | 1370 | #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1371 | 1371 | return self.separator.join(nav_items) |
|
1372 | 1372 | |
|
1373 | 1373 | def pager(self, format='~2~', page_param='page', partial_param='partial', |
|
1374 | 1374 | show_if_single_page=False, separator=' ', onclick=None, |
|
1375 | 1375 | symbol_first='<<', symbol_last='>>', |
|
1376 | 1376 | symbol_previous='<', symbol_next='>', |
|
1377 | 1377 | link_attr={'class': 'pager_link', 'rel': 'prerender'}, |
|
1378 | 1378 | curpage_attr={'class': 'pager_curpage'}, |
|
1379 | 1379 | dotdot_attr={'class': 'pager_dotdot'}, **kwargs): |
|
1380 | 1380 | |
|
1381 | 1381 | self.curpage_attr = curpage_attr |
|
1382 | 1382 | self.separator = separator |
|
1383 | 1383 | self.pager_kwargs = kwargs |
|
1384 | 1384 | self.page_param = page_param |
|
1385 | 1385 | self.partial_param = partial_param |
|
1386 | 1386 | self.onclick = onclick |
|
1387 | 1387 | self.link_attr = link_attr |
|
1388 | 1388 | self.dotdot_attr = dotdot_attr |
|
1389 | 1389 | |
|
1390 | 1390 | # Don't show navigator if there is no more than one page |
|
1391 | 1391 | if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): |
|
1392 | 1392 | return '' |
|
1393 | 1393 | |
|
1394 | 1394 | from string import Template |
|
1395 | 1395 | # Replace ~...~ in token format by range of pages |
|
1396 | 1396 | result = re.sub(r'~(\d+)~', self._range, format) |
|
1397 | 1397 | |
|
1398 | 1398 | # Interpolate '%' variables |
|
1399 | 1399 | result = Template(result).safe_substitute({ |
|
1400 | 1400 | 'first_page': self.first_page, |
|
1401 | 1401 | 'last_page': self.last_page, |
|
1402 | 1402 | 'page': self.page, |
|
1403 | 1403 | 'page_count': self.page_count, |
|
1404 | 1404 | 'items_per_page': self.items_per_page, |
|
1405 | 1405 | 'first_item': self.first_item, |
|
1406 | 1406 | 'last_item': self.last_item, |
|
1407 | 1407 | 'item_count': self.item_count, |
|
1408 | 1408 | 'link_first': self.page > self.first_page and \ |
|
1409 | 1409 | self._pagerlink(self.first_page, symbol_first) or '', |
|
1410 | 1410 | 'link_last': self.page < self.last_page and \ |
|
1411 | 1411 | self._pagerlink(self.last_page, symbol_last) or '', |
|
1412 | 1412 | 'link_previous': self.previous_page and \ |
|
1413 | 1413 | self._pagerlink(self.previous_page, symbol_previous) \ |
|
1414 | 1414 | or HTML.span(symbol_previous, class_="pg-previous disabled"), |
|
1415 | 1415 | 'link_next': self.next_page and \ |
|
1416 | 1416 | self._pagerlink(self.next_page, symbol_next) \ |
|
1417 | 1417 | or HTML.span(symbol_next, class_="pg-next disabled") |
|
1418 | 1418 | }) |
|
1419 | 1419 | |
|
1420 | 1420 | return literal(result) |
|
1421 | 1421 | |
|
1422 | 1422 | |
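The sliding-window arithmetic in Page._get_pos() is easier to see in isolation; a standalone sketch, assuming Python 2 integer division as in the code above:

def get_pos(cur_page, max_page, items):
    # same logic as Page._get_pos(): an `items`-wide window clamped to [1, max_page]
    edge = (items / 2) + 1
    if cur_page <= edge:
        radius = max(items / 2, items - cur_page)
    elif (max_page - cur_page) < edge:
        radius = (items - 1) - (max_page - cur_page)
    else:
        radius = items / 2
    left = max(1, cur_page - radius)
    right = min(max_page, cur_page + radius)
    return left, cur_page, right

# radius 2 -> a 5-item window, matching the '1 .. 5 6 [7] 8 9 .. 12' example above
print(get_pos(7, 12, 5))   # -> (5, 7, 9)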
|
1423 | 1423 | #============================================================================== |
|
1424 | 1424 | # REPO PAGER, PAGER FOR REPOSITORY |
|
1425 | 1425 | #============================================================================== |
|
1426 | 1426 | class RepoPage(Page): |
|
1427 | 1427 | |
|
1428 | 1428 | def __init__(self, collection, page=1, items_per_page=20, |
|
1429 | 1429 | item_count=None, url=None, **kwargs): |
|
1430 | 1430 | |
|
1431 | 1431 | """Create a "RepoPage" instance, a special pager for paging a |
|
1432 | 1432 | repository |
|
1433 | 1433 | """ |
|
1434 | 1434 | self._url_generator = url |
|
1435 | 1435 | |
|
1436 | 1436 | # Save the kwargs class-wide so they can be used in the pager() method |
|
1437 | 1437 | self.kwargs = kwargs |
|
1438 | 1438 | |
|
1439 | 1439 | # Save a reference to the collection |
|
1440 | 1440 | self.original_collection = collection |
|
1441 | 1441 | |
|
1442 | 1442 | self.collection = collection |
|
1443 | 1443 | |
|
1444 | 1444 | # The self.page is the number of the current page. |
|
1445 | 1445 | # The first page has the number 1! |
|
1446 | 1446 | try: |
|
1447 | 1447 | self.page = int(page) # make it int() if we get it as a string |
|
1448 | 1448 | except (ValueError, TypeError): |
|
1449 | 1449 | self.page = 1 |
|
1450 | 1450 | |
|
1451 | 1451 | self.items_per_page = items_per_page |
|
1452 | 1452 | |
|
1453 | 1453 | # Unless the user tells us how many items the collections has |
|
1454 | 1454 | # we calculate that ourselves. |
|
1455 | 1455 | if item_count is not None: |
|
1456 | 1456 | self.item_count = item_count |
|
1457 | 1457 | else: |
|
1458 | 1458 | self.item_count = len(self.collection) |
|
1459 | 1459 | |
|
1460 | 1460 | # Compute the number of the first and last available page |
|
1461 | 1461 | if self.item_count > 0: |
|
1462 | 1462 | self.first_page = 1 |
|
1463 | 1463 | self.page_count = int(math.ceil(float(self.item_count) / |
|
1464 | 1464 | self.items_per_page)) |
|
1465 | 1465 | self.last_page = self.first_page + self.page_count - 1 |
|
1466 | 1466 | |
|
1467 | 1467 | # Make sure that the requested page number is in the range of |
|
1468 | 1468 | # valid pages |
|
1469 | 1469 | if self.page > self.last_page: |
|
1470 | 1470 | self.page = self.last_page |
|
1471 | 1471 | elif self.page < self.first_page: |
|
1472 | 1472 | self.page = self.first_page |
|
1473 | 1473 | |
|
1474 | 1474 | # Note: the number of items on this page can be less than |
|
1475 | 1475 | # items_per_page if the last page is not full |
|
1476 | 1476 | self.first_item = max(0, (self.item_count) - (self.page * |
|
1477 | 1477 | items_per_page)) |
|
1478 | 1478 | self.last_item = ((self.item_count - 1) - items_per_page * |
|
1479 | 1479 | (self.page - 1)) |
|
1480 | 1480 | |
|
1481 | 1481 | self.items = list(self.collection[self.first_item:self.last_item + 1]) |
|
1482 | 1482 | |
|
1483 | 1483 | # Links to previous and next page |
|
1484 | 1484 | if self.page > self.first_page: |
|
1485 | 1485 | self.previous_page = self.page - 1 |
|
1486 | 1486 | else: |
|
1487 | 1487 | self.previous_page = None |
|
1488 | 1488 | |
|
1489 | 1489 | if self.page < self.last_page: |
|
1490 | 1490 | self.next_page = self.page + 1 |
|
1491 | 1491 | else: |
|
1492 | 1492 | self.next_page = None |
|
1493 | 1493 | |
|
1494 | 1494 | # No items available |
|
1495 | 1495 | else: |
|
1496 | 1496 | self.first_page = None |
|
1497 | 1497 | self.page_count = 0 |
|
1498 | 1498 | self.last_page = None |
|
1499 | 1499 | self.first_item = None |
|
1500 | 1500 | self.last_item = None |
|
1501 | 1501 | self.previous_page = None |
|
1502 | 1502 | self.next_page = None |
|
1503 | 1503 | self.items = [] |
|
1504 | 1504 | |
|
1505 | 1505 | # This is a subclass of the 'list' type. Initialise the list now. |
|
1506 | 1506 | list.__init__(self, reversed(self.items)) |
|
1507 | 1507 | |
|
1508 | 1508 | |
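RepoPage slices the collection from the end because repository commits are paged newest-first; a standalone sketch of that index arithmetic with made-up numbers:

def repo_page_slice(item_count, page, items_per_page):
    # mirrors the first_item / last_item computation in RepoPage.__init__
    first_item = max(0, item_count - page * items_per_page)
    last_item = (item_count - 1) - items_per_page * (page - 1)
    return first_item, last_item

# 45 commits, 20 per page: page 1 shows indexes 25..44, page 3 shows 0..4
print(repo_page_slice(45, 1, 20))   # -> (25, 44)
print(repo_page_slice(45, 3, 20))   # -> (0, 4)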
|
1509 | 1509 | def changed_tooltip(nodes): |
|
1510 | 1510 | """ |
|
1511 | 1511 | Generates an HTML string for changed nodes on the commit page. |
|
1512 | 1512 | It limits the output to 30 entries |
|
1513 | 1513 | |
|
1514 | 1514 | :param nodes: LazyNodesGenerator |
|
1515 | 1515 | """ |
|
1516 | 1516 | if nodes: |
|
1517 | 1517 | pref = ': <br/> ' |
|
1518 | 1518 | suf = '' |
|
1519 | 1519 | if len(nodes) > 30: |
|
1520 | 1520 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) |
|
1521 | 1521 | return literal(pref + '<br/> '.join([safe_unicode(x.path) |
|
1522 | 1522 | for x in nodes[:30]]) + suf) |
|
1523 | 1523 | else: |
|
1524 | 1524 | return ': ' + _('No Files') |
|
1525 | 1525 | |
|
1526 | 1526 | |
|
1527 | 1527 | def breadcrumb_repo_link(repo): |
|
1528 | 1528 | """ |
|
1529 | 1529 | Makes a breadcrumbs path link to repo |
|
1530 | 1530 | |
|
1531 | 1531 | ex:: |
|
1532 | 1532 | group >> subgroup >> repo |
|
1533 | 1533 | |
|
1534 | 1534 | :param repo: a Repository instance |
|
1535 | 1535 | """ |
|
1536 | 1536 | |
|
1537 | 1537 | path = [ |
|
1538 | 1538 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name)) |
|
1539 | 1539 | for group in repo.groups_with_parents |
|
1540 | 1540 | ] + [ |
|
1541 | link_to(repo.just_name, |
|
|
1541 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name)) | |
|
1542 | 1542 | ] |
|
1543 | 1543 | |
|
1544 | 1544 | return literal(' » '.join(path)) |
|
1545 | 1545 | |
|
1546 | 1546 | |
|
1547 | 1547 | def format_byte_size_binary(file_size): |
|
1548 | 1548 | """ |
|
1549 | 1549 | Formats file/folder sizes using binary (base-2) units. |
|
1550 | 1550 | """ |
|
1551 | 1551 | formatted_size = format_byte_size(file_size, binary=True) |
|
1552 | 1552 | return formatted_size |
|
1553 | 1553 | |
|
1554 | 1554 | |
|
1555 | 1555 | def urlify_text(text_, safe=True): |
|
1556 | 1556 | """ |
|
1557 | 1557 | Extract URLs from text and make HTML links out of them |
|
1558 | 1558 | |
|
1559 | 1559 | :param text_: |
|
1560 | 1560 | """ |
|
1561 | 1561 | |
|
1562 | 1562 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1563 | 1563 | '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1564 | 1564 | |
|
1565 | 1565 | def url_func(match_obj): |
|
1566 | 1566 | url_full = match_obj.groups()[0] |
|
1567 | 1567 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
1568 | 1568 | _newtext = url_pat.sub(url_func, text_) |
|
1569 | 1569 | if safe: |
|
1570 | 1570 | return literal(_newtext) |
|
1571 | 1571 | return _newtext |
|
1572 | 1572 | |
|
1573 | 1573 | |
|
1574 | 1574 | def urlify_commits(text_, repository): |
|
1575 | 1575 | """ |
|
1576 | 1576 | Extract commit ids from text and make links from them |
|
1577 | 1577 | |
|
1578 | 1578 | :param text_: |
|
1579 | 1579 | :param repository: repo name to build the URL with |
|
1580 | 1580 | """ |
|
1581 | 1581 | from pylons import url # doh, we need to re-import url to mock it later |
|
1582 | 1582 | URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1583 | 1583 | |
|
1584 | 1584 | def url_func(match_obj): |
|
1585 | 1585 | commit_id = match_obj.groups()[1] |
|
1586 | 1586 | pref = match_obj.groups()[0] |
|
1587 | 1587 | suf = match_obj.groups()[2] |
|
1588 | 1588 | |
|
1589 | 1589 | tmpl = ( |
|
1590 | 1590 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1591 | 1591 | '%(commit_id)s</a>%(suf)s' |
|
1592 | 1592 | ) |
|
1593 | 1593 | return tmpl % { |
|
1594 | 1594 | 'pref': pref, |
|
1595 | 1595 | 'cls': 'revision-link', |
|
1596 | 1596 | 'url': url('changeset_home', repo_name=repository, |
|
1597 | 1597 | revision=commit_id, qualified=True), |
|
1598 | 1598 | 'commit_id': commit_id, |
|
1599 | 1599 | 'suf': suf |
|
1600 | 1600 | } |
|
1601 | 1601 | |
|
1602 | 1602 | newtext = URL_PAT.sub(url_func, text_) |
|
1603 | 1603 | |
|
1604 | 1604 | return newtext |
|
1605 | 1605 | |
|
1606 | 1606 | |
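A standalone sketch of the hash-detection part of urlify_commits(); the href prefix below is a hypothetical placeholder, while the real helper builds the URL through url('changeset_home', ...):

import re

COMMIT_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

def link_commits(text, link_base='/example-repo/changeset/'):
    # wrap every 12-40 character hex string that stands alone in a link
    def repl(match):
        pref, commit_id, suf = match.groups()
        return '%s<a class="revision-link" href="%s%s">%s</a>%s' % (
            pref, link_base, commit_id, commit_id, suf)
    return COMMIT_PAT.sub(repl, text)

print(link_commits('fix regression introduced in deadbeefcafe1234'))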
|
1607 | 1607 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1608 | 1608 | return_raw_data=False, link_format='html'): |
|
1609 | 1609 | pref = '' |
|
1610 | 1610 | if match_obj.group().startswith(' '): |
|
1611 | 1611 | pref = ' ' |
|
1612 | 1612 | |
|
1613 | 1613 | issue_id = ''.join(match_obj.groups()) |
|
1614 | 1614 | |
|
1615 | 1615 | if link_format == 'html': |
|
1616 | 1616 | tmpl = ( |
|
1617 | 1617 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1618 | 1618 | '%(issue-prefix)s%(id-repr)s' |
|
1619 | 1619 | '</a>') |
|
1620 | 1620 | elif link_format == 'rst': |
|
1621 | 1621 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1622 | 1622 | elif link_format == 'markdown': |
|
1623 | 1623 | tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1624 | 1624 | else: |
|
1625 | 1625 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1626 | 1626 | |
|
1627 | 1627 | (repo_name_cleaned, |
|
1628 | 1628 | parent_group_name) = RepoGroupModel().\ |
|
1629 | 1629 | _get_group_name_and_parent(repo_name) |
|
1630 | 1630 | |
|
1631 | 1631 | # variables replacement |
|
1632 | 1632 | named_vars = { |
|
1633 | 1633 | 'id': issue_id, |
|
1634 | 1634 | 'repo': repo_name, |
|
1635 | 1635 | 'repo_name': repo_name_cleaned, |
|
1636 | 1636 | 'group_name': parent_group_name |
|
1637 | 1637 | } |
|
1638 | 1638 | # named regex variables |
|
1639 | 1639 | named_vars.update(match_obj.groupdict()) |
|
1640 | 1640 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1641 | 1641 | |
|
1642 | 1642 | data = { |
|
1643 | 1643 | 'pref': pref, |
|
1644 | 1644 | 'cls': 'issue-tracker-link', |
|
1645 | 1645 | 'url': _url, |
|
1646 | 1646 | 'id-repr': issue_id, |
|
1647 | 1647 | 'issue-prefix': entry['pref'], |
|
1648 | 1648 | 'serv': entry['url'], |
|
1649 | 1649 | } |
|
1650 | 1650 | if return_raw_data: |
|
1651 | 1651 | return { |
|
1652 | 1652 | 'id': issue_id, |
|
1653 | 1653 | 'url': _url |
|
1654 | 1654 | } |
|
1655 | 1655 | return tmpl % data |
|
1656 | 1656 | |
|
1657 | 1657 | |
|
1658 | 1658 | def process_patterns(text_string, repo_name, link_format='html'): |
|
1659 | 1659 | allowed_formats = ['html', 'rst', 'markdown'] |
|
1660 | 1660 | if link_format not in allowed_formats: |
|
1661 | 1661 | raise ValueError('Link format can be only one of:{} got {}'.format( |
|
1662 | 1662 | allowed_formats, link_format)) |
|
1663 | 1663 | |
|
1664 | 1664 | repo = None |
|
1665 | 1665 | if repo_name: |
|
1666 | 1666 | # Retrieve the repo so an invalid repo_name does not explode on |

1667 | 1667 | # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down |
|
1668 | 1668 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1669 | 1669 | |
|
1670 | 1670 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1671 | 1671 | active_entries = settings_model.get_settings(cache=True) |
|
1672 | 1672 | |
|
1673 | 1673 | issues_data = [] |
|
1674 | 1674 | newtext = text_string |
|
1675 | 1675 | |
|
1676 | 1676 | for uid, entry in active_entries.items(): |
|
1677 | 1677 | log.debug('found issue tracker entry with uid %s' % (uid,)) |
|
1678 | 1678 | |
|
1679 | 1679 | if not (entry['pat'] and entry['url']): |
|
1680 | 1680 | log.debug('skipping due to missing data') |
|
1681 | 1681 | continue |
|
1682 | 1682 | |
|
1683 | 1683 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s' |
|
1684 | 1684 | % (uid, entry['pat'], entry['url'], entry['pref'])) |
|
1685 | 1685 | |
|
1686 | 1686 | try: |
|
1687 | 1687 | pattern = re.compile(r'%s' % entry['pat']) |
|
1688 | 1688 | except re.error: |
|
1689 | 1689 | log.exception( |
|
1690 | 1690 | 'issue tracker pattern: `%s` failed to compile', |
|
1691 | 1691 | entry['pat']) |
|
1692 | 1692 | continue |
|
1693 | 1693 | |
|
1694 | 1694 | data_func = partial( |
|
1695 | 1695 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1696 | 1696 | return_raw_data=True) |
|
1697 | 1697 | |
|
1698 | 1698 | for match_obj in pattern.finditer(text_string): |
|
1699 | 1699 | issues_data.append(data_func(match_obj)) |
|
1700 | 1700 | |
|
1701 | 1701 | url_func = partial( |
|
1702 | 1702 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1703 | 1703 | link_format=link_format) |
|
1704 | 1704 | |
|
1705 | 1705 | newtext = pattern.sub(url_func, newtext) |
|
1706 | 1706 | log.debug('processed prefix:uid `%s`' % (uid,)) |
|
1707 | 1707 | |
|
1708 | 1708 | return newtext, issues_data |
|
1709 | 1709 | |
|
1710 | 1710 | |
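A standalone sketch of the per-entry substitution done by process_patterns() / _process_url_func(); the pattern and URL template below are hypothetical issue-tracker settings, the real values come from IssueTrackerSettingsModel:

import re
import string

entry = {
    'pat': r'#(?P<issue_id>\d+)',                      # hypothetical pattern
    'url': 'https://issues.example.com/${issue_id}',   # hypothetical URL template
    'pref': '#',
}
pattern = re.compile(entry['pat'])

def to_link(match):
    named_vars = {'id': ''.join(match.groups())}
    named_vars.update(match.groupdict())               # named regex variables
    url = string.Template(entry['url']).safe_substitute(**named_vars)
    return '<a class="issue-tracker-link" href="%s">%s%s</a>' % (
        url, entry['pref'], named_vars['id'])

print(pattern.sub(to_link, 'closes #42'))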
|
1711 | 1711 | def urlify_commit_message(commit_text, repository=None): |
|
1712 | 1712 | """ |
|
1713 | 1713 | Parses given text message and makes proper links. |
|
1714 | 1714 | issues are linked to the given issue server, and commit ids become commit links |
|
1715 | 1715 | |
|
1716 | 1716 | :param commit_text: |
|
1717 | 1717 | :param repository: |
|
1718 | 1718 | """ |
|
1719 | 1719 | from pylons import url # doh, we need to re-import url to mock it later |
|
1720 | 1720 | |
|
1721 | 1721 | def escaper(string): |
|
1722 | 1722 | return string.replace('<', '&lt;').replace('>', '&gt;') |
|
1723 | 1723 | |
|
1724 | 1724 | newtext = escaper(commit_text) |
|
1725 | 1725 | |
|
1726 | 1726 | # extract http/https links and make them real urls |
|
1727 | 1727 | newtext = urlify_text(newtext, safe=False) |
|
1728 | 1728 | |
|
1729 | 1729 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1730 | 1730 | # the scope of repository present. |
|
1731 | 1731 | if repository: |
|
1732 | 1732 | newtext = urlify_commits(newtext, repository) |
|
1733 | 1733 | |
|
1734 | 1734 | # process issue tracker patterns |
|
1735 | 1735 | newtext, issues = process_patterns(newtext, repository or '') |
|
1736 | 1736 | |
|
1737 | 1737 | return literal(newtext) |
|
1738 | 1738 | |
|
1739 | 1739 | |
|
1740 | 1740 | def render_binary(repo_name, file_obj): |
|
1741 | 1741 | """ |
|
1742 | 1742 | Choose how to render a binary file |
|
1743 | 1743 | """ |
|
1744 | 1744 | filename = file_obj.name |
|
1745 | 1745 | |
|
1746 | 1746 | # images |
|
1747 | 1747 | for ext in ['*.png', '*.jpg', '*.ico', '*.gif']: |
|
1748 | 1748 | if fnmatch.fnmatch(filename, pat=ext): |
|
1749 | 1749 | alt = filename |
|
1750 | 1750 | src = url('files_raw_home', repo_name=repo_name, |
|
1751 | 1751 | revision=file_obj.commit.raw_id, f_path=file_obj.path) |
|
1752 | 1752 | return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src)) |
|
1753 | 1753 | |
|
1754 | 1754 | |
|
1755 | 1755 | def renderer_from_filename(filename, exclude=None): |
|
1756 | 1756 | """ |
|
1757 | 1757 | Choose a renderer based on filename; this works only for text-based files |
|
1758 | 1758 | """ |
|
1759 | 1759 | |
|
1760 | 1760 | # ipython |
|
1761 | 1761 | for ext in ['*.ipynb']: |
|
1762 | 1762 | if fnmatch.fnmatch(filename, pat=ext): |
|
1763 | 1763 | return 'jupyter' |
|
1764 | 1764 | |
|
1765 | 1765 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1766 | 1766 | if is_markup: |
|
1767 | 1767 | return is_markup |
|
1768 | 1768 | return None |
|
1769 | 1769 | |
|
1770 | 1770 | |
|
1771 | 1771 | def render(source, renderer='rst', mentions=False, relative_url=None, |
|
1772 | 1772 | repo_name=None): |
|
1773 | 1773 | |
|
1774 | 1774 | def maybe_convert_relative_links(html_source): |
|
1775 | 1775 | if relative_url: |
|
1776 | 1776 | return relative_links(html_source, relative_url) |
|
1777 | 1777 | return html_source |
|
1778 | 1778 | |
|
1779 | 1779 | if renderer == 'rst': |
|
1780 | 1780 | if repo_name: |
|
1781 | 1781 | # process patterns on comments if we pass in repo name |
|
1782 | 1782 | source, issues = process_patterns( |
|
1783 | 1783 | source, repo_name, link_format='rst') |
|
1784 | 1784 | |
|
1785 | 1785 | return literal( |
|
1786 | 1786 | '<div class="rst-block">%s</div>' % |
|
1787 | 1787 | maybe_convert_relative_links( |
|
1788 | 1788 | MarkupRenderer.rst(source, mentions=mentions))) |
|
1789 | 1789 | elif renderer == 'markdown': |
|
1790 | 1790 | if repo_name: |
|
1791 | 1791 | # process patterns on comments if we pass in repo name |
|
1792 | 1792 | source, issues = process_patterns( |
|
1793 | 1793 | source, repo_name, link_format='markdown') |
|
1794 | 1794 | |
|
1795 | 1795 | return literal( |
|
1796 | 1796 | '<div class="markdown-block">%s</div>' % |
|
1797 | 1797 | maybe_convert_relative_links( |
|
1798 | 1798 | MarkupRenderer.markdown(source, flavored=True, |
|
1799 | 1799 | mentions=mentions))) |
|
1800 | 1800 | elif renderer == 'jupyter': |
|
1801 | 1801 | return literal( |
|
1802 | 1802 | '<div class="ipynb">%s</div>' % |
|
1803 | 1803 | maybe_convert_relative_links( |
|
1804 | 1804 | MarkupRenderer.jupyter(source))) |
|
1805 | 1805 | |
|
1806 | 1806 | # None means just show the file-source |
|
1807 | 1807 | return None |
|
1808 | 1808 | |
|
1809 | 1809 | |
|
1810 | 1810 | def commit_status(repo, commit_id): |
|
1811 | 1811 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1812 | 1812 | |
|
1813 | 1813 | |
|
1814 | 1814 | def commit_status_lbl(commit_status): |
|
1815 | 1815 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1816 | 1816 | |
|
1817 | 1817 | |
|
1818 | 1818 | def commit_time(repo_name, commit_id): |
|
1819 | 1819 | repo = Repository.get_by_repo_name(repo_name) |
|
1820 | 1820 | commit = repo.get_commit(commit_id=commit_id) |
|
1821 | 1821 | return commit.date |
|
1822 | 1822 | |
|
1823 | 1823 | |
|
1824 | 1824 | def get_permission_name(key): |
|
1825 | 1825 | return dict(Permission.PERMS).get(key) |
|
1826 | 1826 | |
|
1827 | 1827 | |
|
1828 | 1828 | def journal_filter_help(): |
|
1829 | 1829 | return _( |
|
1830 | 1830 | 'Example filter terms:\n' + |
|
1831 | 1831 | ' repository:vcs\n' + |
|
1832 | 1832 | ' username:marcin\n' + |
|
1833 | 1833 | ' action:*push*\n' + |
|
1834 | 1834 | ' ip:127.0.0.1\n' + |
|
1835 | 1835 | ' date:20120101\n' + |
|
1836 | 1836 | ' date:[20120101100000 TO 20120102]\n' + |
|
1837 | 1837 | '\n' + |
|
1838 | 1838 | 'Generate wildcards using \'*\' character:\n' + |
|
1839 | 1839 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1840 | 1840 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1841 | 1841 | '\n' + |
|
1842 | 1842 | 'Optional AND / OR operators in queries\n' + |
|
1843 | 1843 | ' "repository:vcs OR repository:test"\n' + |
|
1844 | 1844 | ' "username:test AND repository:test*"\n' |
|
1845 | 1845 | ) |
|
1846 | 1846 | |
|
1847 | 1847 | |
|
1848 | 1848 | def search_filter_help(searcher): |
|
1849 | 1849 | |
|
1850 | 1850 | terms = '' |
|
1851 | 1851 | return _( |
|
1852 | 1852 | 'Example filter terms for `{searcher}` search:\n' + |
|
1853 | 1853 | '{terms}\n' + |
|
1854 | 1854 | 'Generate wildcards using \'*\' character:\n' + |
|
1855 | 1855 | ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' + |
|
1856 | 1856 | ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1857 | 1857 | '\n' + |
|
1858 | 1858 | 'Optional AND / OR operators in queries\n' + |
|
1859 | 1859 | ' "repo_name:vcs OR repo_name:test"\n' + |
|
1860 | 1860 | ' "owner:test AND repo_name:test*"\n' + |
|
1861 | 1861 | 'More: {search_doc}' |
|
1862 | 1862 | ).format(searcher=searcher.name, |
|
1863 | 1863 | terms=terms, search_doc=searcher.query_lang_doc) |
|
1864 | 1864 | |
|
1865 | 1865 | |
|
1866 | 1866 | def not_mapped_error(repo_name): |
|
1867 | 1867 | flash(_('%s repository is not mapped to db perhaps' |
|
1868 | 1868 | ' it was created or renamed from the filesystem' |
|
1869 | 1869 | ' please run the application again' |
|
1870 | 1870 | ' in order to rescan repositories') % repo_name, category='error') |
|
1871 | 1871 | |
|
1872 | 1872 | |
|
1873 | 1873 | def ip_range(ip_addr): |
|
1874 | 1874 | from rhodecode.model.db import UserIpMap |
|
1875 | 1875 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1876 | 1876 | return '%s - %s' % (s, e) |
|
1877 | 1877 | |
|
1878 | 1878 | |
|
1879 | 1879 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1880 | 1880 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1881 | 1881 | if method.lower() != 'get' and needs_csrf_token: |
|
1882 | 1882 | raise Exception( |
|
1883 | 1883 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1884 | 1884 | 'CSRF token. If the endpoint does not require such token you can ' + |
|
1885 | 1885 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1886 | 1886 | |
|
1887 | 1887 | return wh_form(url, method=method, **attrs) |
|
1888 | 1888 | |
|
1889 | 1889 | |
|
1890 | 1890 | def secure_form(url, method="POST", multipart=False, **attrs): |
|
1891 | 1891 | """Start a form tag that points the action to an url. This |
|
1892 | 1892 | form tag will also include the hidden field containing |
|
1893 | 1893 | the auth token. |
|
1894 | 1894 | |
|
1895 | 1895 | The url options should be given either as a string, or as a |
|
1896 | 1896 | ``url()`` function. The method for the form defaults to POST. |
|
1897 | 1897 | |
|
1898 | 1898 | Options: |
|
1899 | 1899 | |
|
1900 | 1900 | ``multipart`` |
|
1901 | 1901 | If set to True, the enctype is set to "multipart/form-data". |
|
1902 | 1902 | ``method`` |
|
1903 | 1903 | The method to use when submitting the form, usually either |
|
1904 | 1904 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1905 | 1905 | hidden input with name _method is added to simulate the verb |
|
1906 | 1906 | over POST. |
|
1907 | 1907 | |
|
1908 | 1908 | """ |
|
1909 | 1909 | from webhelpers.pylonslib.secure_form import insecure_form |
|
1910 | 1910 | form = insecure_form(url, method, multipart, **attrs) |
|
1911 | 1911 | token = csrf_input() |
|
1912 | 1912 | return literal("%s\n%s" % (form, token)) |
|
1913 | 1913 | |
|
1914 | 1914 | def csrf_input(): |
|
1915 | 1915 | return literal( |
|
1916 | 1916 | '<input type="hidden" id="{}" name="{}" value="{}">'.format( |
|
1917 | 1917 | csrf_token_key, csrf_token_key, get_csrf_token())) |
|
1918 | 1918 | |
|
1919 | 1919 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
1920 | 1920 | select_html = select(name, selected, options, **attrs) |
|
1921 | 1921 | select2 = """ |
|
1922 | 1922 | <script> |
|
1923 | 1923 | $(document).ready(function() { |
|
1924 | 1924 | $('#%s').select2({ |
|
1925 | 1925 | containerCssClass: 'drop-menu', |
|
1926 | 1926 | dropdownCssClass: 'drop-menu-dropdown', |
|
1927 | 1927 | dropdownAutoWidth: true%s |
|
1928 | 1928 | }); |
|
1929 | 1929 | }); |
|
1930 | 1930 | </script> |
|
1931 | 1931 | """ |
|
1932 | 1932 | filter_option = """, |
|
1933 | 1933 | minimumResultsForSearch: -1 |
|
1934 | 1934 | """ |
|
1935 | 1935 | input_id = attrs.get('id') or name |
|
1936 | 1936 | filter_enabled = "" if enable_filter else filter_option |
|
1937 | 1937 | select_script = literal(select2 % (input_id, filter_enabled)) |
|
1938 | 1938 | |
|
1939 | 1939 | return literal(select_html+select_script) |
|
1940 | 1940 | |
|
1941 | 1941 | |
|
1942 | 1942 | def get_visual_attr(tmpl_context_var, attr_name): |
|
1943 | 1943 | """ |
|
1944 | 1944 | A safe way to get an attribute from the `visual` variable of the template context |
|
1945 | 1945 | |
|
1946 | 1946 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
1947 | 1947 | :param attr_name: name of the attribute we fetch from the c.visual |
|
1948 | 1948 | """ |
|
1949 | 1949 | visual = getattr(tmpl_context_var, 'visual', None) |
|
1950 | 1950 | if not visual: |
|
1951 | 1951 | return |
|
1952 | 1952 | else: |
|
1953 | 1953 | return getattr(visual, attr_name, None) |
|
1954 | 1954 | |
|
1955 | 1955 | |
|
1956 | 1956 | def get_last_path_part(file_node): |
|
1957 | 1957 | if not file_node.path: |
|
1958 | 1958 | return u'' |
|
1959 | 1959 | |
|
1960 | 1960 | path = safe_unicode(file_node.path.split('/')[-1]) |
|
1961 | 1961 | return u'../' + path |
|
1962 | 1962 | |
|
1963 | 1963 | |
|
1964 | 1964 | def route_url(*args, **kwargs): |
|
1965 | 1965 | """ |
|
1966 | 1966 | Wrapper around pyramids `route_url` (fully qualified url) function. |
|
1967 | 1967 | It is used to generate URLs from within pylons views or templates. |
|
1968 | 1968 | This will be removed when the pyramid migration is finished. |
|
1969 | 1969 | """ |
|
1970 | 1970 | req = get_current_request() |
|
1971 | 1971 | return req.route_url(*args, **kwargs) |
|
1972 | 1972 | |
|
1973 | 1973 | |
|
1974 | 1974 | def route_path(*args, **kwargs): |
|
1975 | 1975 | """ |
|
1976 | 1976 | Wrapper around pyramids `route_path` function. It is used to generate |
|
1977 | 1977 | URLs from within pylons views or templates. This will be removed when |
|
1978 | 1978 | the pyramid migration is finished. |
|
1979 | 1979 | """ |
|
1980 | 1980 | req = get_current_request() |
|
1981 | 1981 | return req.route_path(*args, **kwargs) |
|
1982 | 1982 | |
|
1983 | 1983 | |
|
1984 | 1984 | def route_path_or_none(*args, **kwargs): |
|
1985 | 1985 | try: |
|
1986 | 1986 | return route_path(*args, **kwargs) |
|
1987 | 1987 | except KeyError: |
|
1988 | 1988 | return None |
|
1989 | 1989 | |
|
1990 | 1990 | |
|
1991 | 1991 | def static_url(*args, **kwds): |
|
1992 | 1992 | """ |
|
1993 | 1993 | Wrapper around pyramids `static_url` function. It is used to generate |
|
1994 | 1994 | URLs from within pylons views or templates. This will be removed when |
|
1995 | 1995 | the pyramid migration is finished. |
|
1996 | 1996 | """ |
|
1997 | 1997 | req = get_current_request() |
|
1998 | 1998 | return req.static_url(*args, **kwds) |
|
1999 | 1999 | |
|
2000 | 2000 | |
|
2001 | 2001 | def resource_path(*args, **kwds): |
|
2002 | 2002 | """ |
|
2003 | 2003 | Wrapper around pyramids `resource_path` function. It is used to generate |
|
2004 | 2004 | URLs from within pylons views or templates. This will be removed when |
|
2005 | 2005 | the pyramid migration is finished. |
|
2006 | 2006 | """ |
|
2007 | 2007 | req = get_current_request() |
|
2008 | 2008 | return req.resource_path(*args, **kwds) |
|
2009 | 2009 | |
|
2010 | 2010 | |
|
2011 | 2011 | def api_call_example(method, args): |
|
2012 | 2012 | """ |
|
2013 | 2013 | Generates an API call example via CURL |
|
2014 | 2014 | """ |
|
2015 | 2015 | args_json = json.dumps(OrderedDict([ |
|
2016 | 2016 | ('id', 1), |
|
2017 | 2017 | ('auth_token', 'SECRET'), |
|
2018 | 2018 | ('method', method), |
|
2019 | 2019 | ('args', args) |
|
2020 | 2020 | ])) |
|
2021 | 2021 | return literal( |
|
2022 | 2022 | "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'" |
|
2023 | 2023 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2024 | 2024 | "and needs to be of `api calls` role." |
|
2025 | 2025 | .format( |
|
2026 | 2026 | api_url=route_url('apiv2'), |
|
2027 | 2027 | token_url=route_url('my_account_auth_tokens'), |
|
2028 | 2028 | data=args_json)) |
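For context, a standalone sketch of the JSON payload that api_call_example() embeds into the generated curl command; the method name and arguments below are hypothetical placeholders:

import json
from collections import OrderedDict

args_json = json.dumps(OrderedDict([
    ('id', 1),
    ('auth_token', 'SECRET'),
    ('method', 'example_method'),            # hypothetical method name
    ('args', {'example_arg': 'value'}),      # hypothetical arguments
]))

# the helper wraps this payload roughly as:
#   curl <api_url> -X POST -H 'content-type:text/plain' --data-binary '<args_json>'
print(args_json)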
@@ -1,635 +1,632 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import collections |
|
28 | 28 | |
|
29 | 29 | from datetime import datetime |
|
30 | 30 | |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | from pyramid.threadlocal import get_current_registry |
|
33 | 33 | from sqlalchemy.sql.expression import null |
|
34 | 34 | from sqlalchemy.sql.functions import coalesce |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib import helpers as h, diffs |
|
37 | 37 | from rhodecode.lib.channelstream import channelstream_request |
|
38 | 38 | from rhodecode.lib.utils import action_logger |
|
39 | 39 | from rhodecode.lib.utils2 import extract_mentioned_users |
|
40 | 40 | from rhodecode.model import BaseModel |
|
41 | 41 | from rhodecode.model.db import ( |
|
42 | 42 | ChangesetComment, User, Notification, PullRequest, AttributeDict) |
|
43 | 43 | from rhodecode.model.notification import NotificationModel |
|
44 | 44 | from rhodecode.model.meta import Session |
|
45 | 45 | from rhodecode.model.settings import VcsSettingsModel |
|
46 | 46 | from rhodecode.model.notification import EmailNotificationModel |
|
47 | 47 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class CommentsModel(BaseModel): |
|
54 | 54 | |
|
55 | 55 | cls = ChangesetComment |
|
56 | 56 | |
|
57 | 57 | DIFF_CONTEXT_BEFORE = 3 |
|
58 | 58 | DIFF_CONTEXT_AFTER = 3 |
|
59 | 59 | |
|
60 | 60 | def __get_commit_comment(self, changeset_comment): |
|
61 | 61 | return self._get_instance(ChangesetComment, changeset_comment) |
|
62 | 62 | |
|
63 | 63 | def __get_pull_request(self, pull_request): |
|
64 | 64 | return self._get_instance(PullRequest, pull_request) |
|
65 | 65 | |
|
66 | 66 | def _extract_mentions(self, s): |
|
67 | 67 | user_objects = [] |
|
68 | 68 | for username in extract_mentioned_users(s): |
|
69 | 69 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
70 | 70 | if user_obj: |
|
71 | 71 | user_objects.append(user_obj) |
|
72 | 72 | return user_objects |
|
73 | 73 | |
|
74 | 74 | def _get_renderer(self, global_renderer='rst'): |
|
75 | 75 | try: |
|
76 | 76 | # try reading from visual context |
|
77 | 77 | from pylons import tmpl_context |
|
78 | 78 | global_renderer = tmpl_context.visual.default_renderer |
|
79 | 79 | except AttributeError: |
|
80 | 80 | log.debug("Renderer not set, falling back " |
|
81 | 81 | "to default renderer '%s'", global_renderer) |
|
82 | 82 | except Exception: |
|
83 | 83 | log.error(traceback.format_exc()) |
|
84 | 84 | return global_renderer |
|
85 | 85 | |
|
86 | 86 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
87 | 87 | # group comments by version and build the 'at', 'until', 'display' and 'outdated' buckets |
|
88 | 88 | |
|
89 | 89 | comment_groups = collections.defaultdict(list) |
|
90 | 90 | [comment_groups[ |
|
91 | 91 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
92 | 92 | |
|
93 | 93 | def yield_comments(pos): |
|
94 | 94 | for co in comment_groups[pos]: |
|
95 | 95 | yield co |
|
96 | 96 | |
|
97 | 97 | comment_versions = collections.defaultdict( |
|
98 | 98 | lambda: collections.defaultdict(list)) |
|
99 | 99 | prev_prvid = -1 |
|
100 | 100 | # fake last entry with None, to aggregate on "latest" version which |
|
101 | 101 | # doesn't have a pull_request_version_id |
|
102 | 102 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
103 | 103 | prvid = ver.pull_request_version_id |
|
104 | 104 | if prev_prvid == -1: |
|
105 | 105 | prev_prvid = prvid |
|
106 | 106 | |
|
107 | 107 | for co in yield_comments(prvid): |
|
108 | 108 | comment_versions[prvid]['at'].append(co) |
|
109 | 109 | |
|
110 | 110 | # save until |
|
111 | 111 | current = comment_versions[prvid]['at'] |
|
112 | 112 | prev_until = comment_versions[prev_prvid]['until'] |
|
113 | 113 | cur_until = prev_until + current |
|
114 | 114 | comment_versions[prvid]['until'].extend(cur_until) |
|
115 | 115 | |
|
116 | 116 | # save outdated |
|
117 | 117 | if inline: |
|
118 | 118 | outdated = [x for x in cur_until |
|
119 | 119 | if x.outdated_at_version(show_version)] |
|
120 | 120 | else: |
|
121 | 121 | outdated = [x for x in cur_until |
|
122 | 122 | if x.older_than_version(show_version)] |
|
123 | 123 | display = [x for x in cur_until if x not in outdated] |
|
124 | 124 | |
|
125 | 125 | comment_versions[prvid]['outdated'] = outdated |
|
126 | 126 | comment_versions[prvid]['display'] = display |
|
127 | 127 | |
|
128 | 128 | prev_prvid = prvid |
|
129 | 129 | |
|
130 | 130 | return comment_versions |
|
131 | 131 | |
|
132 | 132 | def get_unresolved_todos(self, pull_request, show_outdated=True): |
|
133 | 133 | |
|
134 | 134 | todos = Session().query(ChangesetComment) \ |
|
135 | 135 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
136 | 136 | .filter(ChangesetComment.resolved_by == None) \ |
|
137 | 137 | .filter(ChangesetComment.comment_type |
|
138 | 138 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
139 | 139 | |
|
140 | 140 | if not show_outdated: |
|
141 | 141 | todos = todos.filter( |
|
142 | 142 | coalesce(ChangesetComment.display_state, '') != |
|
143 | 143 | ChangesetComment.COMMENT_OUTDATED) |
|
144 | 144 | |
|
145 | 145 | todos = todos.all() |
|
146 | 146 | |
|
147 | 147 | return todos |
|
148 | 148 | |
|
149 | 149 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
150 | 150 | |
|
151 | 151 | todos = Session().query(ChangesetComment) \ |
|
152 | 152 | .filter(ChangesetComment.revision == commit_id) \ |
|
153 | 153 | .filter(ChangesetComment.resolved_by == None) \ |
|
154 | 154 | .filter(ChangesetComment.comment_type |
|
155 | 155 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
156 | 156 | |
|
157 | 157 | if not show_outdated: |
|
158 | 158 | todos = todos.filter( |
|
159 | 159 | coalesce(ChangesetComment.display_state, '') != |
|
160 | 160 | ChangesetComment.COMMENT_OUTDATED) |
|
161 | 161 | |
|
162 | 162 | todos = todos.all() |
|
163 | 163 | |
|
164 | 164 | return todos |
|
165 | 165 | |
|
166 | 166 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
167 | 167 | f_path=None, line_no=None, status_change=None, |
|
168 | 168 | status_change_type=None, comment_type=None, |
|
169 | 169 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
170 | 170 | renderer=None): |
|
171 | 171 | """ |
|
172 | 172 | Creates new comment for commit or pull request. |
|
173 | 173 | If status_change is not None, this comment is associated with a |

174 | 174 | status change of the commit, or of a commit associated with the pull request |
|
175 | 175 | |
|
176 | 176 | :param text: |
|
177 | 177 | :param repo: |
|
178 | 178 | :param user: |
|
179 | 179 | :param commit_id: |
|
180 | 180 | :param pull_request: |
|
181 | 181 | :param f_path: |
|
182 | 182 | :param line_no: |
|
183 | 183 | :param status_change: Label for status change |
|
184 | 184 | :param comment_type: Type of comment |
|
185 | 185 | :param status_change_type: type of status change |
|
186 | 186 | :param closing_pr: |
|
187 | 187 | :param send_email: |
|
188 | 188 | :param renderer: pick renderer for this comment |
|
189 | 189 | """ |
|
190 | 190 | if not text: |
|
191 | 191 | log.warning('Missing text for comment, skipping...') |
|
192 | 192 | return |
|
193 | 193 | |
|
194 | 194 | if not renderer: |
|
195 | 195 | renderer = self._get_renderer() |
|
196 | 196 | |
|
197 | 197 | repo = self._get_repo(repo) |
|
198 | 198 | user = self._get_user(user) |
|
199 | 199 | |
|
200 | 200 | schema = comment_schema.CommentSchema() |
|
201 | 201 | validated_kwargs = schema.deserialize(dict( |
|
202 | 202 | comment_body=text, |
|
203 | 203 | comment_type=comment_type, |
|
204 | 204 | comment_file=f_path, |
|
205 | 205 | comment_line=line_no, |
|
206 | 206 | renderer_type=renderer, |
|
207 | 207 | status_change=status_change_type, |
|
208 | 208 | resolves_comment_id=resolves_comment_id, |
|
209 | 209 | repo=repo.repo_id, |
|
210 | 210 | user=user.user_id, |
|
211 | 211 | )) |
|
212 | 212 | |
|
213 | 213 | comment = ChangesetComment() |
|
214 | 214 | comment.renderer = validated_kwargs['renderer_type'] |
|
215 | 215 | comment.text = validated_kwargs['comment_body'] |
|
216 | 216 | comment.f_path = validated_kwargs['comment_file'] |
|
217 | 217 | comment.line_no = validated_kwargs['comment_line'] |
|
218 | 218 | comment.comment_type = validated_kwargs['comment_type'] |
|
219 | 219 | |
|
220 | 220 | comment.repo = repo |
|
221 | 221 | comment.author = user |
|
222 | 222 | comment.resolved_comment = self.__get_commit_comment( |
|
223 | 223 | validated_kwargs['resolves_comment_id']) |
|
224 | 224 | |
|
225 | 225 | pull_request_id = pull_request |
|
226 | 226 | |
|
227 | 227 | commit_obj = None |
|
228 | 228 | pull_request_obj = None |
|
229 | 229 | |
|
230 | 230 | if commit_id: |
|
231 | 231 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
232 | 232 | # do a lookup, so we don't pass something bad here |
|
233 | 233 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
234 | 234 | comment.revision = commit_obj.raw_id |
|
235 | 235 | |
|
236 | 236 | elif pull_request_id: |
|
237 | 237 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
238 | 238 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
239 | 239 | comment.pull_request = pull_request_obj |
|
240 | 240 | else: |
|
241 | 241 | raise Exception('Please specify commit or pull_request_id') |
|
242 | 242 | |
|
243 | 243 | Session().add(comment) |
|
244 | 244 | Session().flush() |
|
245 | 245 | kwargs = { |
|
246 | 246 | 'user': user, |
|
247 | 247 | 'renderer_type': renderer, |
|
248 | 248 | 'repo_name': repo.repo_name, |
|
249 | 249 | 'status_change': status_change, |
|
250 | 250 | 'status_change_type': status_change_type, |
|
251 | 251 | 'comment_body': text, |
|
252 | 252 | 'comment_file': f_path, |
|
253 | 253 | 'comment_line': line_no, |
|
254 | 254 | 'comment_type': comment_type or 'note' |
|
255 | 255 | } |
|
256 | 256 | |
|
257 | 257 | if commit_obj: |
|
258 | 258 | recipients = ChangesetComment.get_users( |
|
259 | 259 | revision=commit_obj.raw_id) |
|
260 | 260 | # add commit author if it's in RhodeCode system |
|
261 | 261 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
262 | 262 | if not cs_author: |
|
263 | 263 | # use repo owner if we cannot extract the author correctly |
|
264 | 264 | cs_author = repo.user |
|
265 | 265 | recipients += [cs_author] |
|
266 | 266 | |
|
267 | 267 | commit_comment_url = self.get_url(comment) |
|
268 | 268 | |
|
269 | 269 | target_repo_url = h.link_to( |
|
270 | 270 | repo.repo_name, |
|
271 | h.url('summary |
|
|
272 | repo_name=repo.repo_name, qualified=True)) | |
|
271 | h.route_url('repo_summary', repo_name=repo.repo_name)) | |
|
273 | 272 | |
|
274 | 273 | # commit specifics |
|
275 | 274 | kwargs.update({ |
|
276 | 275 | 'commit': commit_obj, |
|
277 | 276 | 'commit_message': commit_obj.message, |
|
278 | 277 | 'commit_target_repo': target_repo_url, |
|
279 | 278 | 'commit_comment_url': commit_comment_url, |
|
280 | 279 | }) |
|
281 | 280 | |
|
282 | 281 | elif pull_request_obj: |
|
283 | 282 | # get the current participants of this pull request |
|
284 | 283 | recipients = ChangesetComment.get_users( |
|
285 | 284 | pull_request_id=pull_request_obj.pull_request_id) |
|
286 | 285 | # add pull request author |
|
287 | 286 | recipients += [pull_request_obj.author] |
|
288 | 287 | |
|
289 | 288 | # add the reviewers to notification |
|
290 | 289 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
291 | 290 | |
|
292 | 291 | pr_target_repo = pull_request_obj.target_repo |
|
293 | 292 | pr_source_repo = pull_request_obj.source_repo |
|
294 | 293 | |
|
295 | 294 | pr_comment_url = h.url( |
|
296 | 295 | 'pullrequest_show', |
|
297 | 296 | repo_name=pr_target_repo.repo_name, |
|
298 | 297 | pull_request_id=pull_request_obj.pull_request_id, |
|
299 | 298 | anchor='comment-%s' % comment.comment_id, |
|
300 | 299 | qualified=True,) |
|
301 | 300 | |
|
302 | 301 | # set some variables for email notification |
|
303 | pr_target_repo_url = h.url( | |
|
304 | 'summary |
|
|
305 | qualified=True) | |
|
302 | pr_target_repo_url = h.route_url( | |
|
303 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
|
306 | 304 | |
|
307 | pr_source_repo_url = h.url( | |
|
308 | 'summary |
|
|
309 | qualified=True) | |
|
305 | pr_source_repo_url = h.route_url( | |
|
306 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
|
310 | 307 | |
|
311 | 308 | # pull request specifics |
|
312 | 309 | kwargs.update({ |
|
313 | 310 | 'pull_request': pull_request_obj, |
|
314 | 311 | 'pr_id': pull_request_obj.pull_request_id, |
|
315 | 312 | 'pr_target_repo': pr_target_repo, |
|
316 | 313 | 'pr_target_repo_url': pr_target_repo_url, |
|
317 | 314 | 'pr_source_repo': pr_source_repo, |
|
318 | 315 | 'pr_source_repo_url': pr_source_repo_url, |
|
319 | 316 | 'pr_comment_url': pr_comment_url, |
|
320 | 317 | 'pr_closing': closing_pr, |
|
321 | 318 | }) |
|
322 | 319 | if send_email: |
|
323 | 320 | # pre-generate the subject for notification itself |
|
324 | 321 | (subject, |
|
325 | 322 | _h, _e, # we don't care about those |
|
326 | 323 | body_plaintext) = EmailNotificationModel().render_email( |
|
327 | 324 | notification_type, **kwargs) |
|
328 | 325 | |
|
329 | 326 | mention_recipients = set( |
|
330 | 327 | self._extract_mentions(text)).difference(recipients) |
|
331 | 328 | |
|
332 | 329 | # create notification objects, and emails |
|
333 | 330 | NotificationModel().create( |
|
334 | 331 | created_by=user, |
|
335 | 332 | notification_subject=subject, |
|
336 | 333 | notification_body=body_plaintext, |
|
337 | 334 | notification_type=notification_type, |
|
338 | 335 | recipients=recipients, |
|
339 | 336 | mention_recipients=mention_recipients, |
|
340 | 337 | email_kwargs=kwargs, |
|
341 | 338 | ) |
|
342 | 339 | |
|
343 | 340 | action = ( |
|
344 | 341 | 'user_commented_pull_request:{}'.format( |
|
345 | 342 | comment.pull_request.pull_request_id) |
|
346 | 343 | if comment.pull_request |
|
347 | 344 | else 'user_commented_revision:{}'.format(comment.revision) |
|
348 | 345 | ) |
|
349 | 346 | action_logger(user, action, comment.repo) |
|
350 | 347 | |
|
351 | 348 | registry = get_current_registry() |
|
352 | 349 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
353 | 350 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
354 | 351 | msg_url = '' |
|
355 | 352 | if commit_obj: |
|
356 | 353 | msg_url = commit_comment_url |
|
357 | 354 | repo_name = repo.repo_name |
|
358 | 355 | elif pull_request_obj: |
|
359 | 356 | msg_url = pr_comment_url |
|
360 | 357 | repo_name = pr_target_repo.repo_name |
|
361 | 358 | |
|
362 | 359 | if channelstream_config.get('enabled'): |
|
363 | 360 | message = '<strong>{}</strong> {} - ' \ |
|
364 | 361 | '<a onclick="window.location=\'{}\';' \ |
|
365 | 362 | 'window.location.reload()">' \ |
|
366 | 363 | '<strong>{}</strong></a>' |
|
367 | 364 | message = message.format( |
|
368 | 365 | user.username, _('made a comment'), msg_url, |
|
369 | 366 | _('Show it now')) |
|
370 | 367 | channel = '/repo${}$/pr/{}'.format( |
|
371 | 368 | repo_name, |
|
372 | 369 | pull_request_id |
|
373 | 370 | ) |
|
374 | 371 | payload = { |
|
375 | 372 | 'type': 'message', |
|
376 | 373 | 'timestamp': datetime.utcnow(), |
|
377 | 374 | 'user': 'system', |
|
378 | 375 | 'exclude_users': [user.username], |
|
379 | 376 | 'channel': channel, |
|
380 | 377 | 'message': { |
|
381 | 378 | 'message': message, |
|
382 | 379 | 'level': 'info', |
|
383 | 380 | 'topic': '/notifications' |
|
384 | 381 | } |
|
385 | 382 | } |
|
386 | 383 | channelstream_request(channelstream_config, [payload], |
|
387 | 384 | '/message', raise_exc=False) |
|
388 | 385 | |
|
389 | 386 | return comment |
|
390 | 387 | |
|
391 | 388 | def delete(self, comment): |
|
392 | 389 | """ |
|
393 | 390 | Deletes given comment |
|
394 | 391 | |
|
395 | 392 | :param comment_id: |
|
396 | 393 | """ |
|
397 | 394 | comment = self.__get_commit_comment(comment) |
|
398 | 395 | Session().delete(comment) |
|
399 | 396 | |
|
400 | 397 | return comment |
|
401 | 398 | |
|
402 | 399 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
403 | 400 | q = ChangesetComment.query()\ |
|
404 | 401 | .filter(ChangesetComment.repo_id == repo_id) |
|
405 | 402 | if revision: |
|
406 | 403 | q = q.filter(ChangesetComment.revision == revision) |
|
407 | 404 | elif pull_request: |
|
408 | 405 | pull_request = self.__get_pull_request(pull_request) |
|
409 | 406 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
410 | 407 | else: |
|
411 | 408 | raise Exception('Please specify commit or pull_request') |
|
412 | 409 | q = q.order_by(ChangesetComment.created_on) |
|
413 | 410 | return q.all() |
|
414 | 411 | |
|
415 | 412 | def get_url(self, comment): |
|
416 | 413 | comment = self.__get_commit_comment(comment) |
|
417 | 414 | if comment.pull_request: |
|
418 | 415 | return h.url( |
|
419 | 416 | 'pullrequest_show', |
|
420 | 417 | repo_name=comment.pull_request.target_repo.repo_name, |
|
421 | 418 | pull_request_id=comment.pull_request.pull_request_id, |
|
422 | 419 | anchor='comment-%s' % comment.comment_id, |
|
423 | 420 | qualified=True,) |
|
424 | 421 | else: |
|
425 | 422 | return h.url( |
|
426 | 423 | 'changeset_home', |
|
427 | 424 | repo_name=comment.repo.repo_name, |
|
428 | 425 | revision=comment.revision, |
|
429 | 426 | anchor='comment-%s' % comment.comment_id, |
|
430 | 427 | qualified=True,) |
|
431 | 428 | |
|
432 | 429 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
433 | 430 | """ |
|
434 | 431 | Gets main comments based on revision or pull_request_id |
|
435 | 432 | |
|
436 | 433 | :param repo_id: |
|
437 | 434 | :param revision: |
|
438 | 435 | :param pull_request: |
|
439 | 436 | """ |
|
440 | 437 | |
|
441 | 438 | q = ChangesetComment.query()\ |
|
442 | 439 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
443 | 440 | .filter(ChangesetComment.line_no == None)\ |
|
444 | 441 | .filter(ChangesetComment.f_path == None) |
|
445 | 442 | if revision: |
|
446 | 443 | q = q.filter(ChangesetComment.revision == revision) |
|
447 | 444 | elif pull_request: |
|
448 | 445 | pull_request = self.__get_pull_request(pull_request) |
|
449 | 446 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
450 | 447 | else: |
|
451 | 448 | raise Exception('Please specify commit or pull_request') |
|
452 | 449 | q = q.order_by(ChangesetComment.created_on) |
|
453 | 450 | return q.all() |
|
454 | 451 | |
|
455 | 452 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
456 | 453 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
457 | 454 | return self._group_comments_by_path_and_line_number(q) |
|
458 | 455 | |
|
459 | 456 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
460 | 457 | version=None): |
|
461 | 458 | inline_cnt = 0 |
|
462 | 459 | for fname, per_line_comments in inline_comments.iteritems(): |
|
463 | 460 | for lno, comments in per_line_comments.iteritems(): |
|
464 | 461 | for comm in comments: |
|
465 | 462 | if not comm.outdated_at_version(version) and skip_outdated: |
|
466 | 463 | inline_cnt += 1 |
|
467 | 464 | |
|
468 | 465 | return inline_cnt |
|
469 | 466 | |
|
470 | 467 | def get_outdated_comments(self, repo_id, pull_request): |
|
471 | 468 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
472 | 469 | # of a pull request. |
|
473 | 470 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
474 | 471 | q = q.filter( |
|
475 | 472 | ChangesetComment.display_state == |
|
476 | 473 | ChangesetComment.COMMENT_OUTDATED |
|
477 | 474 | ).order_by(ChangesetComment.comment_id.asc()) |
|
478 | 475 | |
|
479 | 476 | return self._group_comments_by_path_and_line_number(q) |
|
480 | 477 | |
|
481 | 478 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
482 | 479 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
483 | 480 | # commit. |
|
484 | 481 | if revision: |
|
485 | 482 | q = Session().query(ChangesetComment).filter( |
|
486 | 483 | ChangesetComment.repo_id == repo_id, |
|
487 | 484 | ChangesetComment.line_no != null(), |
|
488 | 485 | ChangesetComment.f_path != null(), |
|
489 | 486 | ChangesetComment.revision == revision) |
|
490 | 487 | |
|
491 | 488 | elif pull_request: |
|
492 | 489 | pull_request = self.__get_pull_request(pull_request) |
|
493 | 490 | if not CommentsModel.use_outdated_comments(pull_request): |
|
494 | 491 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
495 | 492 | else: |
|
496 | 493 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
497 | 494 | |
|
498 | 495 | else: |
|
499 | 496 | raise Exception('Please specify commit or pull_request_id') |
|
500 | 497 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
501 | 498 | return q |
|
502 | 499 | |
|
503 | 500 | def _group_comments_by_path_and_line_number(self, q): |
|
504 | 501 | comments = q.all() |
|
505 | 502 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
506 | 503 | for co in comments: |
|
507 | 504 | paths[co.f_path][co.line_no].append(co) |
|
508 | 505 | return paths |
|
509 | 506 | |
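A standalone sketch of the nested-defaultdict grouping used by _group_comments_by_path_and_line_number(), with plain tuples standing in for ChangesetComment objects:

import collections

def group_by_path_and_line(comments):
    # comments: iterable of (f_path, line_no, comment_id) tuples (hypothetical shape)
    paths = collections.defaultdict(lambda: collections.defaultdict(list))
    for f_path, line_no, comment_id in comments:
        paths[f_path][line_no].append(comment_id)
    return paths

grouped = group_by_path_and_line([('setup.py', 'n10', 1), ('setup.py', 'n10', 2)])
print(dict(grouped['setup.py']))   # -> {'n10': [1, 2]}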
|
510 | 507 | @classmethod |
|
511 | 508 | def needed_extra_diff_context(cls): |
|
512 | 509 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
513 | 510 | |
|
514 | 511 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
515 | 512 | if not CommentsModel.use_outdated_comments(pull_request): |
|
516 | 513 | return |
|
517 | 514 | |
|
518 | 515 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
519 | 516 | comments_to_outdate = comments.all() |
|
520 | 517 | |
|
521 | 518 | for comment in comments_to_outdate: |
|
522 | 519 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
523 | 520 | |
|
524 | 521 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
525 | 522 | diff_line = _parse_comment_line_number(comment.line_no) |
|
526 | 523 | |
|
527 | 524 | try: |
|
528 | 525 | old_context = old_diff_proc.get_context_of_line( |
|
529 | 526 | path=comment.f_path, diff_line=diff_line) |
|
530 | 527 | new_context = new_diff_proc.get_context_of_line( |
|
531 | 528 | path=comment.f_path, diff_line=diff_line) |
|
532 | 529 | except (diffs.LineNotInDiffException, |
|
533 | 530 | diffs.FileNotInDiffException): |
|
534 | 531 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
535 | 532 | return |
|
536 | 533 | |
|
537 | 534 | if old_context == new_context: |
|
538 | 535 | return |
|
539 | 536 | |
|
540 | 537 | if self._should_relocate_diff_line(diff_line): |
|
541 | 538 | new_diff_lines = new_diff_proc.find_context( |
|
542 | 539 | path=comment.f_path, context=old_context, |
|
543 | 540 | offset=self.DIFF_CONTEXT_BEFORE) |
|
544 | 541 | if not new_diff_lines: |
|
545 | 542 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
546 | 543 | else: |
|
547 | 544 | new_diff_line = self._choose_closest_diff_line( |
|
548 | 545 | diff_line, new_diff_lines) |
|
549 | 546 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
550 | 547 | else: |
|
551 | 548 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
552 | 549 | |
|
553 | 550 | def _should_relocate_diff_line(self, diff_line): |
|
554 | 551 | """ |
|
555 | 552 | Checks if relocation shall be tried for the given `diff_line`. |
|
556 | 553 | |
|
557 | 554 | If a comment points at one of the first lines of a file, an update may |

558 | 555 | add new lines above it; the old context would then still be found and |

559 | 556 | the comment would be relocated to a new position, even though it no |

560 | 557 | longer refers to the same code. Relocation is therefore skipped here. |
|
561 | 558 | """ |
|
562 | 559 | should_relocate = ( |
|
563 | 560 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
564 | 561 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
565 | 562 | return should_relocate |
|
566 | 563 | |
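A worked example of the guard above, assuming DIFF_CONTEXT_BEFORE is 3 (the real constant lives elsewhere on this class and is not visible in this hunk). DiffLineNumber is rebuilt locally as a namedtuple stand-in for the class in rhodecode.lib.diffs:

import collections

# Stand-in for rhodecode.lib.diffs.DiffLineNumber
DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

DIFF_CONTEXT_BEFORE = 3  # assumption for this sketch

def should_relocate(diff_line):
    # Same test as _should_relocate_diff_line, wrapped in bool() for clean output
    return bool(
        (diff_line.new and diff_line.new > DIFF_CONTEXT_BEFORE) or
        (diff_line.old and diff_line.old > DIFF_CONTEXT_BEFORE))

print(should_relocate(DiffLineNumber(old=None, new=2)))   # False: inside the leading context window
print(should_relocate(DiffLineNumber(old=None, new=10)))  # True: safe to try relocation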
|
567 | 564 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
568 | 565 | candidate = new_diff_lines[0] |
|
569 | 566 | best_delta = _diff_line_delta(diff_line, candidate) |
|
570 | 567 | for new_diff_line in new_diff_lines[1:]: |
|
571 | 568 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
572 | 569 | if delta < best_delta: |
|
573 | 570 | candidate = new_diff_line |
|
574 | 571 | best_delta = delta |
|
575 | 572 | return candidate |
|
576 | 573 | |
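The closest-line selection is a plain linear scan that keeps the candidate with the smallest absolute delta, computed the same way as _diff_line_delta further down. A self-contained sketch using the same namedtuple stand-in (again illustrative names, not the real helpers):

import collections

DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

def diff_line_delta(a, b):
    # Mirrors _diff_line_delta below: compare on the side both lines define
    if None not in (a.new, b.new):
        return abs(a.new - b.new)
    elif None not in (a.old, b.old):
        return abs(a.old - b.old)
    raise ValueError("Cannot compute delta between {} and {}".format(a, b))

def choose_closest(diff_line, new_diff_lines):
    candidate = new_diff_lines[0]
    best_delta = diff_line_delta(diff_line, candidate)
    for new_diff_line in new_diff_lines[1:]:
        delta = diff_line_delta(diff_line, new_diff_line)
        if delta < best_delta:
            candidate, best_delta = new_diff_line, delta
    return candidate

comment_line = DiffLineNumber(old=None, new=14)
candidates = [DiffLineNumber(None, 9), DiffLineNumber(None, 13), DiffLineNumber(None, 20)]
print(choose_closest(comment_line, candidates))  # -> DiffLineNumber(old=None, new=13)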
|
577 | 574 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
578 | 575 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
579 | 576 | comments = comments.filter( |
|
580 | 577 | coalesce(ChangesetComment.display_state, '') != |
|
581 | 578 | ChangesetComment.COMMENT_OUTDATED) |
|
582 | 579 | return comments |
|
583 | 580 | |
|
584 | 581 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
585 | 582 | comments = Session().query(ChangesetComment)\ |
|
586 | 583 | .filter(ChangesetComment.line_no != None)\ |
|
587 | 584 | .filter(ChangesetComment.f_path != None)\ |
|
588 | 585 | .filter(ChangesetComment.pull_request == pull_request) |
|
589 | 586 | return comments |
|
590 | 587 | |
|
591 | 588 | def _all_general_comments_of_pull_request(self, pull_request): |
|
592 | 589 | comments = Session().query(ChangesetComment)\ |
|
593 | 590 | .filter(ChangesetComment.line_no == None)\ |
|
594 | 591 | .filter(ChangesetComment.f_path == None)\ |
|
595 | 592 | .filter(ChangesetComment.pull_request == pull_request) |
|
596 | 593 | return comments |
|
597 | 594 | |
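Both query helpers above compare columns against None with == / != rather than Python's "is", because SQLAlchemy overloads those operators to emit IS NULL / IS NOT NULL, and the visible-comments filter wraps display_state in coalesce so that rows with a NULL state count as not outdated. A minimal, self-contained illustration against an in-memory SQLite database, written in the SQLAlchemy 1.x style this codebase appears to use; the Comment class is a toy stand-in, not the real ChangesetComment model:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.functions import coalesce

Base = declarative_base()

class Comment(Base):
    """Toy stand-in for ChangesetComment with just the columns that matter here."""
    __tablename__ = 'comments'
    comment_id = Column(Integer, primary_key=True)
    line_no = Column(String)        # 'o<n>'/'n<n>' for inline comments, NULL for general ones
    display_state = Column(String)  # NULL or 'comment_outdated'

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([
    Comment(line_no='n1', display_state=None),
    Comment(line_no='n2', display_state='comment_outdated'),
    Comment(line_no=None, display_state=None),   # a general (file-level) comment
])
session.commit()

# "!= None" becomes "line_no IS NOT NULL"; coalesce maps NULL display_state to ''
visible_inline = session.query(Comment)\
    .filter(Comment.line_no != None)\
    .filter(coalesce(Comment.display_state, '') != 'comment_outdated')
print(visible_inline.count())  # -> 1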
|
598 | 595 | @staticmethod |
|
599 | 596 | def use_outdated_comments(pull_request): |
|
600 | 597 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
601 | 598 | settings = settings_model.get_general_settings() |
|
602 | 599 | return settings.get('rhodecode_use_outdated_comments', False) |
|
603 | 600 | |
|
604 | 601 | |
|
605 | 602 | def _parse_comment_line_number(line_no): |
|
606 | 603 | """ |
|
607 | 604 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
608 | 605 | """ |
|
609 | 606 | old_line = None |
|
610 | 607 | new_line = None |
|
611 | 608 | if line_no.startswith('o'): |
|
612 | 609 | old_line = int(line_no[1:]) |
|
613 | 610 | elif line_no.startswith('n'): |
|
614 | 611 | new_line = int(line_no[1:]) |
|
615 | 612 | else: |
|
616 | 613 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
617 | 614 | return diffs.DiffLineNumber(old_line, new_line) |
|
618 | 615 | |
|
619 | 616 | |
|
620 | 617 | def _diff_to_comment_line_number(diff_line): |
|
621 | 618 | if diff_line.new is not None: |
|
622 | 619 | return u'n{}'.format(diff_line.new) |
|
623 | 620 | elif diff_line.old is not None: |
|
624 | 621 | return u'o{}'.format(diff_line.old) |
|
625 | 622 | return u'' |
|
626 | 623 | |
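The two module-level helpers above are converses of each other: one maps the comment model's 'o<num>'/'n<num>' strings onto a DiffLineNumber pair, the other maps a pair back to the string form. A small round-trip sketch, with DiffLineNumber rebuilt locally as a namedtuple stand-in for the real class in rhodecode.lib.diffs:

import collections

DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

def parse_comment_line_number(line_no):
    # 'o12' -> (12, None); 'n7' -> (None, 7)
    if line_no.startswith('o'):
        return DiffLineNumber(int(line_no[1:]), None)
    elif line_no.startswith('n'):
        return DiffLineNumber(None, int(line_no[1:]))
    raise ValueError("Comment lines have to start with either 'o' or 'n'.")

def diff_to_comment_line_number(diff_line):
    # The new side wins when both are present, matching the helper above
    if diff_line.new is not None:
        return u'n{}'.format(diff_line.new)
    elif diff_line.old is not None:
        return u'o{}'.format(diff_line.old)
    return u''

assert parse_comment_line_number('n12') == DiffLineNumber(old=None, new=12)
assert diff_to_comment_line_number(DiffLineNumber(old=3, new=None)) == u'o3'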
|
627 | 624 | |
|
628 | 625 | def _diff_line_delta(a, b): |
|
629 | 626 | if None not in (a.new, b.new): |
|
630 | 627 | return abs(a.new - b.new) |
|
631 | 628 | elif None not in (a.old, b.old): |
|
632 | 629 | return abs(a.old - b.old) |
|
633 | 630 | else: |
|
634 | 631 | raise ValueError( |
|
635 | 632 | "Cannot compute delta between {} and {}".format(a, b)) |
|