repo-summary: re-implemented summary view as pyramid....
marcink
r1785:1cce4ff2 default
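
The diff below replaces the old Pylons controller with a Pyramid class-based view (RepoSummaryView): each public method is registered against a named route declaratively through @view_config and collected when the application configuration is scanned. As a reminder of how that registration pattern works, here is a minimal, self-contained sketch; the route name, view class and port are illustrative only and are not part of RhodeCode.

# Minimal sketch of the @view_config + config.scan() pattern used below.
# 'hello', HelloView and port 6543 are illustrative, not RhodeCode code.
from wsgiref.simple_server import make_server

from pyramid.config import Configurator
from pyramid.view import view_config


class HelloView(object):
    def __init__(self, context, request):
        self.request = request

    @view_config(route_name='hello', request_method='GET', renderer='json')
    def hello(self):
        # renderer='json' serializes the returned dict, much like the
        # 'json_ext' renderer used by the views in this changeset.
        return {'message': 'hello %s' % self.request.matchdict['name']}


def main():
    config = Configurator()
    config.add_route('hello', '/hello/{name}')
    config.scan()  # collects the @view_config declarations in this module
    return config.make_wsgi_app()


if __name__ == '__main__':
    make_server('0.0.0.0', 6543, main()).serve_forever()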

@@ -0,0 +1,368 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 import string
23
24 from pyramid.view import view_config
25
26 from beaker.cache import cache_region
27
28
29 from rhodecode.controllers import utils
30
31 from rhodecode.apps._base import RepoAppView
32 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
33 from rhodecode.lib import caches, helpers as h
34 from rhodecode.lib.helpers import RepoPage
35 from rhodecode.lib.utils2 import safe_str, safe_int
36 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
37 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.vcs.backends.base import EmptyCommit
40 from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError
41 from rhodecode.model.db import Statistics, CacheKey, User
42 from rhodecode.model.meta import Session
43 from rhodecode.model.repo import ReadmeFinder
44 from rhodecode.model.scm import ScmModel
45
46 log = logging.getLogger(__name__)
47
48
49 class RepoSummaryView(RepoAppView):
50
51 def load_default_context(self):
52 c = self._get_local_tmpl_context(include_app_defaults=True)
53
54 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
55 c.repo_info = self.db_repo
56 c.rhodecode_repo = None
57 if not c.repository_requirements_missing:
58 c.rhodecode_repo = self.rhodecode_vcs_repo
59
60 self._register_global_c(c)
61 return c
62
63 def _get_readme_data(self, db_repo, default_renderer):
64 repo_name = db_repo.repo_name
65 log.debug('Looking for README file')
66
67 @cache_region('long_term')
68 def _generate_readme(cache_key):
69 readme_data = None
70 readme_node = None
71 readme_filename = None
72 commit = self._get_landing_commit_or_none(db_repo)
73 if commit:
74 log.debug("Searching for a README file.")
75 readme_node = ReadmeFinder(default_renderer).search(commit)
76 if readme_node:
77 relative_url = h.url('files_raw_home',
78 repo_name=repo_name,
79 revision=commit.raw_id,
80 f_path=readme_node.path)
81 readme_data = self._render_readme_or_none(
82 commit, readme_node, relative_url)
83 readme_filename = readme_node.path
84 return readme_data, readme_filename
85
86 invalidator_context = CacheKey.repo_context_cache(
87 _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)
88
89 with invalidator_context as context:
90 context.invalidate()
91 computed = context.compute()
92
93 return computed
94
95 def _get_landing_commit_or_none(self, db_repo):
96 log.debug("Getting the landing commit.")
97 try:
98 commit = db_repo.get_landing_commit()
99 if not isinstance(commit, EmptyCommit):
100 return commit
101 else:
102 log.debug("Repository is empty, no README to render.")
103 except CommitError:
104 log.exception(
105 "Problem getting commit when trying to render the README.")
106
107 def _render_readme_or_none(self, commit, readme_node, relative_url):
108 log.debug(
109 'Found README file `%s` rendering...', readme_node.path)
110 renderer = MarkupRenderer()
111 try:
112 html_source = renderer.render(
113 readme_node.content, filename=readme_node.path)
114 if relative_url:
115 return relative_links(html_source, relative_url)
116 return html_source
117 except Exception:
118 log.exception(
119 "Exception while trying to render the README")
120
121 def _load_commits_context(self, c):
122 p = safe_int(self.request.GET.get('page'), 1)
123 size = safe_int(self.request.GET.get('size'), 10)
124
125 def url_generator(**kw):
126 query_params = {
127 'size': size
128 }
129 query_params.update(kw)
130 return h.route_path(
131 'repo_summary_commits',
132 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
133
134 pre_load = ['author', 'branch', 'date', 'message']
135 try:
136 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
137 except EmptyRepositoryError:
138 collection = self.rhodecode_vcs_repo
139
140 c.repo_commits = RepoPage(
141 collection, page=p, items_per_page=size, url=url_generator)
142 page_ids = [x.raw_id for x in c.repo_commits]
143 c.comments = self.db_repo.get_comments(page_ids)
144 c.statuses = self.db_repo.statuses(page_ids)
145
146 @LoginRequired()
147 @HasRepoPermissionAnyDecorator(
148 'repository.read', 'repository.write', 'repository.admin')
149 @view_config(
150 route_name='repo_summary_commits', request_method='GET',
151 renderer='rhodecode:templates/summary/summary_commits.mako')
152 def summary_commits(self):
153 c = self.load_default_context()
154 self._load_commits_context(c)
155 return self._get_template_context(c)
156
157 @LoginRequired()
158 @HasRepoPermissionAnyDecorator(
159 'repository.read', 'repository.write', 'repository.admin')
160 @view_config(
161 route_name='repo_summary', request_method='GET',
162 renderer='rhodecode:templates/summary/summary.mako')
163 @view_config(
164 route_name='repo_summary_slash', request_method='GET',
165 renderer='rhodecode:templates/summary/summary.mako')
166 def summary(self):
167 c = self.load_default_context()
168
169 # Prepare the clone URL
170 username = ''
171 if self._rhodecode_user.username != User.DEFAULT_USER:
172 username = safe_str(self._rhodecode_user.username)
173
174 _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
175 if '{repo}' in _def_clone_uri:
176 _def_clone_uri_by_id = _def_clone_uri.replace(
177 '{repo}', '_{repoid}')
178 elif '{repoid}' in _def_clone_uri:
179 _def_clone_uri_by_id = _def_clone_uri.replace(
180 '_{repoid}', '{repo}')
181
182 c.clone_repo_url = self.db_repo.clone_url(
183 user=username, uri_tmpl=_def_clone_uri)
184 c.clone_repo_url_id = self.db_repo.clone_url(
185 user=username, uri_tmpl=_def_clone_uri_by_id)
186
187 # If enabled, get statistics data
188
189 c.show_stats = bool(self.db_repo.enable_statistics)
190
191 stats = Session().query(Statistics) \
192 .filter(Statistics.repository == self.db_repo) \
193 .scalar()
194
195 c.stats_percentage = 0
196
197 if stats and stats.languages:
198 c.no_data = False is self.db_repo.enable_statistics
199 lang_stats_d = json.loads(stats.languages)
200
201 # Sort first by decreasing count and second by the file extension,
202 # so we have a consistent output.
203 lang_stats_items = sorted(lang_stats_d.iteritems(),
204 key=lambda k: (-k[1], k[0]))[:10]
205 lang_stats = [(x, {"count": y,
206 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
207 for x, y in lang_stats_items]
208
209 c.trending_languages = json.dumps(lang_stats)
210 else:
211 c.no_data = True
212 c.trending_languages = json.dumps({})
213
214 scm_model = ScmModel()
215 c.enable_downloads = self.db_repo.enable_downloads
216 c.repository_followers = scm_model.get_followers(self.db_repo)
217 c.repository_forks = scm_model.get_forks(self.db_repo)
218 c.repository_is_user_following = scm_model.is_following_repo(
219 self.db_repo_name, self._rhodecode_user.user_id)
220
221 # first interaction with the VCS instance after here...
222 if c.repository_requirements_missing:
223 self.request.override_renderer = \
224 'rhodecode:templates/summary/missing_requirements.mako'
225 return self._get_template_context(c)
226
227 c.readme_data, c.readme_file = \
228 self._get_readme_data(self.db_repo, c.visual.default_renderer)
229
230 # loads the summary commits template context
231 self._load_commits_context(c)
232
233 return self._get_template_context(c)
234
235 def get_request_commit_id(self):
236 return self.request.matchdict['commit_id']
237
238 @LoginRequired()
239 @HasRepoPermissionAnyDecorator(
240 'repository.read', 'repository.write', 'repository.admin')
241 @view_config(
242 route_name='repo_stats', request_method='GET',
243 renderer='json_ext')
244 def repo_stats(self):
245 commit_id = self.get_request_commit_id()
246
247 _namespace = caches.get_repo_namespace_key(
248 caches.SUMMARY_STATS, self.db_repo_name)
249 show_stats = bool(self.db_repo.enable_statistics)
250 cache_manager = caches.get_cache_manager(
251 'repo_cache_long', _namespace)
252 _cache_key = caches.compute_key_from_params(
253 self.db_repo_name, commit_id, show_stats)
254
255 def compute_stats():
256 code_stats = {}
257 size = 0
258 try:
259 scm_instance = self.db_repo.scm_instance()
260 commit = scm_instance.get_commit(commit_id)
261
262 for node in commit.get_filenodes_generator():
263 size += node.size
264 if not show_stats:
265 continue
266 ext = string.lower(node.extension)
267 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
268 if ext_info:
269 if ext in code_stats:
270 code_stats[ext]['count'] += 1
271 else:
272 code_stats[ext] = {"count": 1, "desc": ext_info}
273 except EmptyRepositoryError:
274 pass
275 return {'size': h.format_byte_size_binary(size),
276 'code_stats': code_stats}
277
278 stats = cache_manager.get(_cache_key, createfunc=compute_stats)
279 return stats
280
281 @LoginRequired()
282 @HasRepoPermissionAnyDecorator(
283 'repository.read', 'repository.write', 'repository.admin')
284 @view_config(
285 route_name='repo_refs_data', request_method='GET',
286 renderer='json_ext')
287 def repo_refs_data(self):
288 _ = self.request.translate
289 self.load_default_context()
290
291 repo = self.rhodecode_vcs_repo
292 refs_to_create = [
293 (_("Branch"), repo.branches, 'branch'),
294 (_("Tag"), repo.tags, 'tag'),
295 (_("Bookmark"), repo.bookmarks, 'book'),
296 ]
297 res = self._create_reference_data(
298 repo, self.db_repo_name, refs_to_create)
299 data = {
300 'more': False,
301 'results': res
302 }
303 return data
304
305 @LoginRequired()
306 @HasRepoPermissionAnyDecorator(
307 'repository.read', 'repository.write', 'repository.admin')
308 @view_config(
309 route_name='repo_refs_changelog_data', request_method='GET',
310 renderer='json_ext')
311 def repo_refs_changelog_data(self):
312 _ = self.request.translate
313 self.load_default_context()
314
315 repo = self.rhodecode_vcs_repo
316
317 refs_to_create = [
318 (_("Branches"), repo.branches, 'branch'),
319 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
320 # TODO: enable when vcs can handle bookmarks filters
321 # (_("Bookmarks"), repo.bookmarks, "book"),
322 ]
323 res = self._create_reference_data(
324 repo, self.db_repo_name, refs_to_create)
325 data = {
326 'more': False,
327 'results': res
328 }
329 return data
330
331 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
332 format_ref_id = utils.get_format_ref_id(repo)
333
334 result = []
335 for title, refs, ref_type in refs_to_create:
336 if refs:
337 result.append({
338 'text': title,
339 'children': self._create_reference_items(
340 repo, full_repo_name, refs, ref_type,
341 format_ref_id),
342 })
343 return result
344
345 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
346 format_ref_id):
347 result = []
348 is_svn = h.is_svn(repo)
349 for ref_name, raw_id in refs.iteritems():
350 files_url = self._create_files_url(
351 repo, full_repo_name, ref_name, raw_id, is_svn)
352 result.append({
353 'text': ref_name,
354 'id': format_ref_id(ref_name, raw_id),
355 'raw_id': raw_id,
356 'type': ref_type,
357 'files_url': files_url,
358 })
359 return result
360
361 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
362 use_commit_id = '/' in ref_name or is_svn
363 return h.url(
364 'files_home',
365 repo_name=full_repo_name,
366 f_path=ref_name if is_svn else '',
367 revision=raw_id if use_commit_id else ref_name,
368 at=ref_name)
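
The summary() view above builds two clone URLs from the configured template: one addressed by repository name ({repo}) and one by numeric id (_{repoid}). The swap itself is plain string substitution; the short standalone sketch below restates it with a made-up template (RhodeCode's real clone_url() additionally fills in the user and the server address).

# Illustrative restatement of the {repo} -> _{repoid} handling in summary();
# the template and values are made up, not RhodeCode defaults.
clone_uri_tmpl = 'https://code.example.com/{repo}'

clone_uri_by_name = clone_uri_tmpl
clone_uri_by_id = clone_uri_tmpl
if '{repo}' in clone_uri_tmpl:
    clone_uri_by_id = clone_uri_tmpl.replace('{repo}', '_{repoid}')

print(clone_uri_by_name.format(repo='rhodecode-demo'))  # .../rhodecode-demo
print(clone_uri_by_id.format(repoid=42))                # .../_42
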
NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
@@ -1,350 +1,357 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import logging
22 import logging
23 from pylons import tmpl_context as c
23 from pylons import tmpl_context as c
24 from pyramid.httpexceptions import HTTPFound
24 from pyramid.httpexceptions import HTTPFound
25
25
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.lib.utils import PartialRenderer
27 from rhodecode.lib.utils import PartialRenderer
28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.lib.ext_json import json
30 from rhodecode.lib.ext_json import json
31 from rhodecode.model import repo
31 from rhodecode.model import repo
32 from rhodecode.model import repo_group
32 from rhodecode.model import repo_group
33 from rhodecode.model.db import User
33 from rhodecode.model.db import User
34 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 ADMIN_PREFIX = '/_admin'
39 ADMIN_PREFIX = '/_admin'
40 STATIC_FILE_PREFIX = '/_static'
40 STATIC_FILE_PREFIX = '/_static'
41
41
42
42
43 def add_route_with_slash(config,name, pattern, **kw):
43 def add_route_with_slash(config,name, pattern, **kw):
44 config.add_route(name, pattern, **kw)
44 config.add_route(name, pattern, **kw)
45 if not pattern.endswith('/'):
45 if not pattern.endswith('/'):
46 config.add_route(name + '_slash', pattern + '/', **kw)
46 config.add_route(name + '_slash', pattern + '/', **kw)
47
47
48
48
49 def get_format_ref_id(repo):
49 def get_format_ref_id(repo):
50 """Returns a `repo` specific reference formatter function"""
50 """Returns a `repo` specific reference formatter function"""
51 if h.is_svn(repo):
51 if h.is_svn(repo):
52 return _format_ref_id_svn
52 return _format_ref_id_svn
53 else:
53 else:
54 return _format_ref_id
54 return _format_ref_id
55
55
56
56
57 def _format_ref_id(name, raw_id):
57 def _format_ref_id(name, raw_id):
58 """Default formatting of a given reference `name`"""
58 """Default formatting of a given reference `name`"""
59 return name
59 return name
60
60
61
61
62 def _format_ref_id_svn(name, raw_id):
62 def _format_ref_id_svn(name, raw_id):
63 """Special way of formatting a reference for Subversion including path"""
63 """Special way of formatting a reference for Subversion including path"""
64 return '%s@%s' % (name, raw_id)
64 return '%s@%s' % (name, raw_id)
65
65
66
66
67 class TemplateArgs(StrictAttributeDict):
67 class TemplateArgs(StrictAttributeDict):
68 pass
68 pass
69
69
70
70
71 class BaseAppView(object):
71 class BaseAppView(object):
72
72
73 def __init__(self, context, request):
73 def __init__(self, context, request):
74 self.request = request
74 self.request = request
75 self.context = context
75 self.context = context
76 self.session = request.session
76 self.session = request.session
77 self._rhodecode_user = request.user # auth user
77 self._rhodecode_user = request.user # auth user
78 self._rhodecode_db_user = self._rhodecode_user.get_instance()
78 self._rhodecode_db_user = self._rhodecode_user.get_instance()
79 self._maybe_needs_password_change(
79 self._maybe_needs_password_change(
80 request.matched_route.name, self._rhodecode_db_user)
80 request.matched_route.name, self._rhodecode_db_user)
81
81
82 def _maybe_needs_password_change(self, view_name, user_obj):
82 def _maybe_needs_password_change(self, view_name, user_obj):
83 log.debug('Checking if user %s needs password change on view %s',
83 log.debug('Checking if user %s needs password change on view %s',
84 user_obj, view_name)
84 user_obj, view_name)
85 skip_user_views = [
85 skip_user_views = [
86 'logout', 'login',
86 'logout', 'login',
87 'my_account_password', 'my_account_password_update'
87 'my_account_password', 'my_account_password_update'
88 ]
88 ]
89
89
90 if not user_obj:
90 if not user_obj:
91 return
91 return
92
92
93 if user_obj.username == User.DEFAULT_USER:
93 if user_obj.username == User.DEFAULT_USER:
94 return
94 return
95
95
96 now = time.time()
96 now = time.time()
97 should_change = user_obj.user_data.get('force_password_change')
97 should_change = user_obj.user_data.get('force_password_change')
98 change_after = safe_int(should_change) or 0
98 change_after = safe_int(should_change) or 0
99 if should_change and now > change_after:
99 if should_change and now > change_after:
100 log.debug('User %s requires password change', user_obj)
100 log.debug('User %s requires password change', user_obj)
101 h.flash('You are required to change your password', 'warning',
101 h.flash('You are required to change your password', 'warning',
102 ignore_duplicate=True)
102 ignore_duplicate=True)
103
103
104 if view_name not in skip_user_views:
104 if view_name not in skip_user_views:
105 raise HTTPFound(
105 raise HTTPFound(
106 self.request.route_path('my_account_password'))
106 self.request.route_path('my_account_password'))
107
107
108 def _get_local_tmpl_context(self):
108 def _get_local_tmpl_context(self, include_app_defaults=False):
109 c = TemplateArgs()
109 c = TemplateArgs()
110 c.auth_user = self.request.user
110 c.auth_user = self.request.user
111 if include_app_defaults:
112 # NOTE(marcink): after full pyramid migration include_app_defaults
113 # should be turned on by default
114 from rhodecode.lib.base import attach_context_attributes
115 attach_context_attributes(c, self.request, self.request.user.user_id)
111 return c
116 return c
112
117
113 def _register_global_c(self, tmpl_args):
118 def _register_global_c(self, tmpl_args):
114 """
119 """
115 Registers attributes to pylons global `c`
120 Registers attributes to pylons global `c`
116 """
121 """
117 # TODO(marcink): remove once pyramid migration is finished
122 # TODO(marcink): remove once pyramid migration is finished
118 for k, v in tmpl_args.items():
123 for k, v in tmpl_args.items():
119 setattr(c, k, v)
124 setattr(c, k, v)
120
125
121 def _get_template_context(self, tmpl_args):
126 def _get_template_context(self, tmpl_args):
122 self._register_global_c(tmpl_args)
127 self._register_global_c(tmpl_args)
123
128
124 local_tmpl_args = {
129 local_tmpl_args = {
125 'defaults': {},
130 'defaults': {},
126 'errors': {},
131 'errors': {},
127 }
132 }
128 local_tmpl_args.update(tmpl_args)
133 local_tmpl_args.update(tmpl_args)
129 return local_tmpl_args
134 return local_tmpl_args
130
135
131 def load_default_context(self):
136 def load_default_context(self):
132 """
137 """
133 example:
138 example:
134
139
135 def load_default_context(self):
140 def load_default_context(self):
136 c = self._get_local_tmpl_context()
141 c = self._get_local_tmpl_context()
137 c.custom_var = 'foobar'
142 c.custom_var = 'foobar'
138 self._register_global_c(c)
143 self._register_global_c(c)
139 return c
144 return c
140 """
145 """
141 raise NotImplementedError('Needs implementation in view class')
146 raise NotImplementedError('Needs implementation in view class')
142
147
143
148
144 class RepoAppView(BaseAppView):
149 class RepoAppView(BaseAppView):
145
150
146 def __init__(self, context, request):
151 def __init__(self, context, request):
147 super(RepoAppView, self).__init__(context, request)
152 super(RepoAppView, self).__init__(context, request)
148 self.db_repo = request.db_repo
153 self.db_repo = request.db_repo
149 self.db_repo_name = self.db_repo.repo_name
154 self.db_repo_name = self.db_repo.repo_name
150 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
155 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
151
156
152 def _handle_missing_requirements(self, error):
157 def _handle_missing_requirements(self, error):
153 log.error(
158 log.error(
154 'Requirements are missing for repository %s: %s',
159 'Requirements are missing for repository %s: %s',
155 self.db_repo_name, error.message)
160 self.db_repo_name, error.message)
156
161
157 def _get_local_tmpl_context(self):
162 def _get_local_tmpl_context(self, include_app_defaults=False):
158 c = super(RepoAppView, self)._get_local_tmpl_context()
163 c = super(RepoAppView, self)._get_local_tmpl_context(
164 include_app_defaults=include_app_defaults)
165
159 # register common vars for this type of view
166 # register common vars for this type of view
160 c.rhodecode_db_repo = self.db_repo
167 c.rhodecode_db_repo = self.db_repo
161 c.repo_name = self.db_repo_name
168 c.repo_name = self.db_repo_name
162 c.repository_pull_requests = self.db_repo_pull_requests
169 c.repository_pull_requests = self.db_repo_pull_requests
163
170
164 c.repository_requirements_missing = False
171 c.repository_requirements_missing = False
165 try:
172 try:
166 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
173 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
167 except RepositoryRequirementError as e:
174 except RepositoryRequirementError as e:
168 c.repository_requirements_missing = True
175 c.repository_requirements_missing = True
169 self._handle_missing_requirements(e)
176 self._handle_missing_requirements(e)
170
177
171 return c
178 return c
172
179
173
180
174 class DataGridAppView(object):
181 class DataGridAppView(object):
175 """
182 """
176 Common class to have re-usable grid rendering components
183 Common class to have re-usable grid rendering components
177 """
184 """
178
185
179 def _extract_ordering(self, request, column_map=None):
186 def _extract_ordering(self, request, column_map=None):
180 column_map = column_map or {}
187 column_map = column_map or {}
181 column_index = safe_int(request.GET.get('order[0][column]'))
188 column_index = safe_int(request.GET.get('order[0][column]'))
182 order_dir = request.GET.get(
189 order_dir = request.GET.get(
183 'order[0][dir]', 'desc')
190 'order[0][dir]', 'desc')
184 order_by = request.GET.get(
191 order_by = request.GET.get(
185 'columns[%s][data][sort]' % column_index, 'name_raw')
192 'columns[%s][data][sort]' % column_index, 'name_raw')
186
193
187 # translate datatable to DB columns
194 # translate datatable to DB columns
188 order_by = column_map.get(order_by) or order_by
195 order_by = column_map.get(order_by) or order_by
189
196
190 search_q = request.GET.get('search[value]')
197 search_q = request.GET.get('search[value]')
191 return search_q, order_by, order_dir
198 return search_q, order_by, order_dir
192
199
193 def _extract_chunk(self, request):
200 def _extract_chunk(self, request):
194 start = safe_int(request.GET.get('start'), 0)
201 start = safe_int(request.GET.get('start'), 0)
195 length = safe_int(request.GET.get('length'), 25)
202 length = safe_int(request.GET.get('length'), 25)
196 draw = safe_int(request.GET.get('draw'))
203 draw = safe_int(request.GET.get('draw'))
197 return draw, start, length
204 return draw, start, length
198
205
199
206
200 class BaseReferencesView(RepoAppView):
207 class BaseReferencesView(RepoAppView):
201 """
208 """
202 Base for reference view for branches, tags and bookmarks.
209 Base for reference view for branches, tags and bookmarks.
203 """
210 """
204 def load_default_context(self):
211 def load_default_context(self):
205 c = self._get_local_tmpl_context()
212 c = self._get_local_tmpl_context()
206
213
207 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
214 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
208 c.repo_info = self.db_repo
215 c.repo_info = self.db_repo
209
216
210 self._register_global_c(c)
217 self._register_global_c(c)
211 return c
218 return c
212
219
213 def load_refs_context(self, ref_items, partials_template):
220 def load_refs_context(self, ref_items, partials_template):
214 _render = PartialRenderer(partials_template)
221 _render = PartialRenderer(partials_template)
215 _data = []
222 _data = []
216 pre_load = ["author", "date", "message"]
223 pre_load = ["author", "date", "message"]
217
224
218 is_svn = h.is_svn(self.rhodecode_vcs_repo)
225 is_svn = h.is_svn(self.rhodecode_vcs_repo)
219 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
226 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
220
227
221 for ref_name, commit_id in ref_items:
228 for ref_name, commit_id in ref_items:
222 commit = self.rhodecode_vcs_repo.get_commit(
229 commit = self.rhodecode_vcs_repo.get_commit(
223 commit_id=commit_id, pre_load=pre_load)
230 commit_id=commit_id, pre_load=pre_load)
224
231
225 # TODO: johbo: Unify generation of reference links
232 # TODO: johbo: Unify generation of reference links
226 use_commit_id = '/' in ref_name or is_svn
233 use_commit_id = '/' in ref_name or is_svn
227 files_url = h.url(
234 files_url = h.url(
228 'files_home',
235 'files_home',
229 repo_name=c.repo_name,
236 repo_name=c.repo_name,
230 f_path=ref_name if is_svn else '',
237 f_path=ref_name if is_svn else '',
231 revision=commit_id if use_commit_id else ref_name,
238 revision=commit_id if use_commit_id else ref_name,
232 at=ref_name)
239 at=ref_name)
233
240
234 _data.append({
241 _data.append({
235 "name": _render('name', ref_name, files_url),
242 "name": _render('name', ref_name, files_url),
236 "name_raw": ref_name,
243 "name_raw": ref_name,
237 "date": _render('date', commit.date),
244 "date": _render('date', commit.date),
238 "date_raw": datetime_to_time(commit.date),
245 "date_raw": datetime_to_time(commit.date),
239 "author": _render('author', commit.author),
246 "author": _render('author', commit.author),
240 "commit": _render(
247 "commit": _render(
241 'commit', commit.message, commit.raw_id, commit.idx),
248 'commit', commit.message, commit.raw_id, commit.idx),
242 "commit_raw": commit.idx,
249 "commit_raw": commit.idx,
243 "compare": _render(
250 "compare": _render(
244 'compare', format_ref_id(ref_name, commit.raw_id)),
251 'compare', format_ref_id(ref_name, commit.raw_id)),
245 })
252 })
246 c.has_references = bool(_data)
253 c.has_references = bool(_data)
247 c.data = json.dumps(_data)
254 c.data = json.dumps(_data)
248
255
249
256
250 class RepoRoutePredicate(object):
257 class RepoRoutePredicate(object):
251 def __init__(self, val, config):
258 def __init__(self, val, config):
252 self.val = val
259 self.val = val
253
260
254 def text(self):
261 def text(self):
255 return 'repo_route = %s' % self.val
262 return 'repo_route = %s' % self.val
256
263
257 phash = text
264 phash = text
258
265
259 def __call__(self, info, request):
266 def __call__(self, info, request):
260
267
261 if hasattr(request, 'vcs_call'):
268 if hasattr(request, 'vcs_call'):
262 # skip vcs calls
269 # skip vcs calls
263 return
270 return
264
271
265 repo_name = info['match']['repo_name']
272 repo_name = info['match']['repo_name']
266 repo_model = repo.RepoModel()
273 repo_model = repo.RepoModel()
267 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
274 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
268
275
269 if by_name_match:
276 if by_name_match:
270 # register this as request object we can re-use later
277 # register this as request object we can re-use later
271 request.db_repo = by_name_match
278 request.db_repo = by_name_match
272 return True
279 return True
273
280
274 by_id_match = repo_model.get_repo_by_id(repo_name)
281 by_id_match = repo_model.get_repo_by_id(repo_name)
275 if by_id_match:
282 if by_id_match:
276 request.db_repo = by_id_match
283 request.db_repo = by_id_match
277 return True
284 return True
278
285
279 return False
286 return False
280
287
281
288
282 class RepoTypeRoutePredicate(object):
289 class RepoTypeRoutePredicate(object):
283 def __init__(self, val, config):
290 def __init__(self, val, config):
284 self.val = val or ['hg', 'git', 'svn']
291 self.val = val or ['hg', 'git', 'svn']
285
292
286 def text(self):
293 def text(self):
287 return 'repo_accepted_type = %s' % self.val
294 return 'repo_accepted_type = %s' % self.val
288
295
289 phash = text
296 phash = text
290
297
291 def __call__(self, info, request):
298 def __call__(self, info, request):
292 if hasattr(request, 'vcs_call'):
299 if hasattr(request, 'vcs_call'):
293 # skip vcs calls
300 # skip vcs calls
294 return
301 return
295
302
296 rhodecode_db_repo = request.db_repo
303 rhodecode_db_repo = request.db_repo
297
304
298 log.debug(
305 log.debug(
299 '%s checking repo type for %s in %s',
306 '%s checking repo type for %s in %s',
300 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
307 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
301
308
302 if rhodecode_db_repo.repo_type in self.val:
309 if rhodecode_db_repo.repo_type in self.val:
303 return True
310 return True
304 else:
311 else:
305 log.warning('Current view is not supported for repo type:%s',
312 log.warning('Current view is not supported for repo type:%s',
306 rhodecode_db_repo.repo_type)
313 rhodecode_db_repo.repo_type)
307 #
314 #
308 # h.flash(h.literal(
315 # h.flash(h.literal(
309 # _('Action not supported for %s.' % rhodecode_repo.alias)),
316 # _('Action not supported for %s.' % rhodecode_repo.alias)),
310 # category='warning')
317 # category='warning')
311 # return redirect(
318 # return redirect(
312 # url('summary_home', repo_name=cls.rhodecode_db_repo.repo_name))
319 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
313
320
314 return False
321 return False
315
322
316
323
317 class RepoGroupRoutePredicate(object):
324 class RepoGroupRoutePredicate(object):
318 def __init__(self, val, config):
325 def __init__(self, val, config):
319 self.val = val
326 self.val = val
320
327
321 def text(self):
328 def text(self):
322 return 'repo_group_route = %s' % self.val
329 return 'repo_group_route = %s' % self.val
323
330
324 phash = text
331 phash = text
325
332
326 def __call__(self, info, request):
333 def __call__(self, info, request):
327 if hasattr(request, 'vcs_call'):
334 if hasattr(request, 'vcs_call'):
328 # skip vcs calls
335 # skip vcs calls
329 return
336 return
330
337
331 repo_group_name = info['match']['repo_group_name']
338 repo_group_name = info['match']['repo_group_name']
332 repo_group_model = repo_group.RepoGroupModel()
339 repo_group_model = repo_group.RepoGroupModel()
333 by_name_match = repo_group_model.get_by_group_name(
340 by_name_match = repo_group_model.get_by_group_name(
334 repo_group_name, cache=True)
341 repo_group_name, cache=True)
335
342
336 if by_name_match:
343 if by_name_match:
337 # register this as request object we can re-use later
344 # register this as request object we can re-use later
338 request.db_repo_group = by_name_match
345 request.db_repo_group = by_name_match
339 return True
346 return True
340
347
341 return False
348 return False
342
349
343
350
344 def includeme(config):
351 def includeme(config):
345 config.add_route_predicate(
352 config.add_route_predicate(
346 'repo_route', RepoRoutePredicate)
353 'repo_route', RepoRoutePredicate)
347 config.add_route_predicate(
354 config.add_route_predicate(
348 'repo_accepted_types', RepoTypeRoutePredicate)
355 'repo_accepted_types', RepoTypeRoutePredicate)
349 config.add_route_predicate(
356 config.add_route_predicate(
350 'repo_group_route', RepoGroupRoutePredicate)
357 'repo_group_route', RepoGroupRoutePredicate)
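
The custom route predicates registered in includeme() above are what make the repo_route=True and repo_accepted_types=[...] keywords on config.add_route() work: Pyramid calls the predicate during URL matching, and RepoRoutePredicate attaches the resolved repository to the request as request.db_repo. A compact sketch of the same mechanism with a toy predicate (the names below are made up):

# Sketch of Pyramid's custom route predicate mechanism, mirroring
# RepoRoutePredicate above; 'color_route' and the route are toy names.
from pyramid.config import Configurator


class ColorRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'color_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # info['match'] holds the matched route elements; returning False
        # tells the router to skip this route, just as the repo predicates do.
        return info['match'].get('color') in self.val


config = Configurator()
config.add_route_predicate('color_route', ColorRoutePredicate)
config.add_route('paint_demo', '/paint/{color}', color_route=['red', 'blue'])
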
@@ -1,304 +1,304 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22 import logging
22 import logging
23
23
24 from pyramid.view import view_config
24 from pyramid.view import view_config
25
25
26 from rhodecode.apps._base import BaseAppView
26 from rhodecode.apps._base import BaseAppView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.auth import LoginRequired, NotAnonymous, \
28 from rhodecode.lib.auth import LoginRequired, NotAnonymous, \
29 HasRepoGroupPermissionAnyDecorator
29 HasRepoGroupPermissionAnyDecorator
30 from rhodecode.lib.index import searcher_from_config
30 from rhodecode.lib.index import searcher_from_config
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.model.db import func, Repository, RepoGroup
33 from rhodecode.model.db import func, Repository, RepoGroup
34 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList
36 from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList
37 from rhodecode.model.user import UserModel
37 from rhodecode.model.user import UserModel
38 from rhodecode.model.user_group import UserGroupModel
38 from rhodecode.model.user_group import UserGroupModel
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HomeView(BaseAppView):
43 class HomeView(BaseAppView):
44
44
45 def load_default_context(self):
45 def load_default_context(self):
46 c = self._get_local_tmpl_context()
46 c = self._get_local_tmpl_context()
47 c.user = c.auth_user.get_instance()
47 c.user = c.auth_user.get_instance()
48 self._register_global_c(c)
48 self._register_global_c(c)
49 return c
49 return c
50
50
51 @LoginRequired()
51 @LoginRequired()
52 @view_config(
52 @view_config(
53 route_name='user_autocomplete_data', request_method='GET',
53 route_name='user_autocomplete_data', request_method='GET',
54 renderer='json_ext', xhr=True)
54 renderer='json_ext', xhr=True)
55 def user_autocomplete_data(self):
55 def user_autocomplete_data(self):
56 query = self.request.GET.get('query')
56 query = self.request.GET.get('query')
57 active = str2bool(self.request.GET.get('active') or True)
57 active = str2bool(self.request.GET.get('active') or True)
58 include_groups = str2bool(self.request.GET.get('user_groups'))
58 include_groups = str2bool(self.request.GET.get('user_groups'))
59 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
59 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
60 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
60 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
61
61
62 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
62 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
63 query, active, include_groups)
63 query, active, include_groups)
64
64
65 _users = UserModel().get_users(
65 _users = UserModel().get_users(
66 name_contains=query, only_active=active)
66 name_contains=query, only_active=active)
67
67
68 def maybe_skip_default_user(usr):
68 def maybe_skip_default_user(usr):
69 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
69 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
70 return False
70 return False
71 return True
71 return True
72 _users = filter(maybe_skip_default_user, _users)
72 _users = filter(maybe_skip_default_user, _users)
73
73
74 if include_groups:
74 if include_groups:
75 # extend with user groups
75 # extend with user groups
76 _user_groups = UserGroupModel().get_user_groups(
76 _user_groups = UserGroupModel().get_user_groups(
77 name_contains=query, only_active=active,
77 name_contains=query, only_active=active,
78 expand_groups=expand_groups)
78 expand_groups=expand_groups)
79 _users = _users + _user_groups
79 _users = _users + _user_groups
80
80
81 return {'suggestions': _users}
81 return {'suggestions': _users}
82
82
83 @LoginRequired()
83 @LoginRequired()
84 @NotAnonymous()
84 @NotAnonymous()
85 @view_config(
85 @view_config(
86 route_name='user_group_autocomplete_data', request_method='GET',
86 route_name='user_group_autocomplete_data', request_method='GET',
87 renderer='json_ext', xhr=True)
87 renderer='json_ext', xhr=True)
88 def user_group_autocomplete_data(self):
88 def user_group_autocomplete_data(self):
89 query = self.request.GET.get('query')
89 query = self.request.GET.get('query')
90 active = str2bool(self.request.GET.get('active') or True)
90 active = str2bool(self.request.GET.get('active') or True)
91 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
91 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
92
92
93 log.debug('generating user group list, query:%s, active:%s',
93 log.debug('generating user group list, query:%s, active:%s',
94 query, active)
94 query, active)
95
95
96 _user_groups = UserGroupModel().get_user_groups(
96 _user_groups = UserGroupModel().get_user_groups(
97 name_contains=query, only_active=active,
97 name_contains=query, only_active=active,
98 expand_groups=expand_groups)
98 expand_groups=expand_groups)
99 _user_groups = _user_groups
99 _user_groups = _user_groups
100
100
101 return {'suggestions': _user_groups}
101 return {'suggestions': _user_groups}
102
102
103 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
103 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
104 query = Repository.query()\
104 query = Repository.query()\
105 .order_by(func.length(Repository.repo_name))\
105 .order_by(func.length(Repository.repo_name))\
106 .order_by(Repository.repo_name)
106 .order_by(Repository.repo_name)
107
107
108 if repo_type:
108 if repo_type:
109 query = query.filter(Repository.repo_type == repo_type)
109 query = query.filter(Repository.repo_type == repo_type)
110
110
111 if name_contains:
111 if name_contains:
112 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
112 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
113 query = query.filter(
113 query = query.filter(
114 Repository.repo_name.ilike(ilike_expression))
114 Repository.repo_name.ilike(ilike_expression))
115 query = query.limit(limit)
115 query = query.limit(limit)
116
116
117 all_repos = query.all()
117 all_repos = query.all()
118 # permission checks are inside this function
118 # permission checks are inside this function
119 repo_iter = ScmModel().get_repos(all_repos)
119 repo_iter = ScmModel().get_repos(all_repos)
120 return [
120 return [
121 {
121 {
122 'id': obj['name'],
122 'id': obj['name'],
123 'text': obj['name'],
123 'text': obj['name'],
124 'type': 'repo',
124 'type': 'repo',
125 'obj': obj['dbrepo'],
125 'obj': obj['dbrepo'],
126 'url': h.url('summary_home', repo_name=obj['name'])
126 'url': h.route_path('repo_summary', repo_name=obj['name'])
127 }
127 }
128 for obj in repo_iter]
128 for obj in repo_iter]
129
129
130 def _get_repo_group_list(self, name_contains=None, limit=20):
130 def _get_repo_group_list(self, name_contains=None, limit=20):
131 query = RepoGroup.query()\
131 query = RepoGroup.query()\
132 .order_by(func.length(RepoGroup.group_name))\
132 .order_by(func.length(RepoGroup.group_name))\
133 .order_by(RepoGroup.group_name)
133 .order_by(RepoGroup.group_name)
134
134
135 if name_contains:
135 if name_contains:
136 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
136 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
137 query = query.filter(
137 query = query.filter(
138 RepoGroup.group_name.ilike(ilike_expression))
138 RepoGroup.group_name.ilike(ilike_expression))
139 query = query.limit(limit)
139 query = query.limit(limit)
140
140
141 all_groups = query.all()
141 all_groups = query.all()
142 repo_groups_iter = ScmModel().get_repo_groups(all_groups)
142 repo_groups_iter = ScmModel().get_repo_groups(all_groups)
143 return [
143 return [
144 {
144 {
145 'id': obj.group_name,
145 'id': obj.group_name,
146 'text': obj.group_name,
146 'text': obj.group_name,
147 'type': 'group',
147 'type': 'group',
148 'obj': {},
148 'obj': {},
149 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name)
149 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name)
150 }
150 }
151 for obj in repo_groups_iter]
151 for obj in repo_groups_iter]
152
152
153 def _get_hash_commit_list(self, auth_user, hash_starts_with=None):
153 def _get_hash_commit_list(self, auth_user, hash_starts_with=None):
154 if not hash_starts_with or len(hash_starts_with) < 3:
154 if not hash_starts_with or len(hash_starts_with) < 3:
155 return []
155 return []
156
156
157 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
157 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
158
158
159 if len(commit_hashes) != 1:
159 if len(commit_hashes) != 1:
160 return []
160 return []
161
161
162 commit_hash_prefix = commit_hashes[0]
162 commit_hash_prefix = commit_hashes[0]
163
163
164 searcher = searcher_from_config(self.request.registry.settings)
164 searcher = searcher_from_config(self.request.registry.settings)
165 result = searcher.search(
165 result = searcher.search(
166 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user,
166 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user,
167 raise_on_exc=False)
167 raise_on_exc=False)
168
168
169 return [
169 return [
170 {
170 {
171 'id': entry['commit_id'],
171 'id': entry['commit_id'],
172 'text': entry['commit_id'],
172 'text': entry['commit_id'],
173 'type': 'commit',
173 'type': 'commit',
174 'obj': {'repo': entry['repository']},
174 'obj': {'repo': entry['repository']},
175 'url': h.url('changeset_home',
175 'url': h.url('changeset_home',
176 repo_name=entry['repository'],
176 repo_name=entry['repository'],
177 revision=entry['commit_id'])
177 revision=entry['commit_id'])
178 }
178 }
179 for entry in result['results']]
179 for entry in result['results']]
180
180
181 @LoginRequired()
181 @LoginRequired()
182 @view_config(
182 @view_config(
183 route_name='repo_list_data', request_method='GET',
183 route_name='repo_list_data', request_method='GET',
184 renderer='json_ext', xhr=True)
184 renderer='json_ext', xhr=True)
185 def repo_list_data(self):
185 def repo_list_data(self):
186 _ = self.request.translate
186 _ = self.request.translate
187
187
188 query = self.request.GET.get('query')
188 query = self.request.GET.get('query')
189 repo_type = self.request.GET.get('repo_type')
189 repo_type = self.request.GET.get('repo_type')
190 log.debug('generating repo list, query:%s, repo_type:%s',
190 log.debug('generating repo list, query:%s, repo_type:%s',
191 query, repo_type)
191 query, repo_type)
192
192
193 res = []
193 res = []
194 repos = self._get_repo_list(query, repo_type=repo_type)
194 repos = self._get_repo_list(query, repo_type=repo_type)
195 if repos:
195 if repos:
196 res.append({
196 res.append({
197 'text': _('Repositories'),
197 'text': _('Repositories'),
198 'children': repos
198 'children': repos
199 })
199 })
200
200
201 data = {
201 data = {
202 'more': False,
202 'more': False,
203 'results': res
203 'results': res
204 }
204 }
205 return data
205 return data
206
206
207 @LoginRequired()
207 @LoginRequired()
208 @view_config(
208 @view_config(
209 route_name='goto_switcher_data', request_method='GET',
209 route_name='goto_switcher_data', request_method='GET',
210 renderer='json_ext', xhr=True)
210 renderer='json_ext', xhr=True)
211 def goto_switcher_data(self):
211 def goto_switcher_data(self):
212 c = self.load_default_context()
212 c = self.load_default_context()
213
213
214 _ = self.request.translate
214 _ = self.request.translate
215
215
216 query = self.request.GET.get('query')
216 query = self.request.GET.get('query')
217 log.debug('generating goto switcher list, query %s', query)
217 log.debug('generating goto switcher list, query %s', query)
218
218
219 res = []
219 res = []
220 repo_groups = self._get_repo_group_list(query)
220 repo_groups = self._get_repo_group_list(query)
221 if repo_groups:
221 if repo_groups:
222 res.append({
222 res.append({
223 'text': _('Groups'),
223 'text': _('Groups'),
224 'children': repo_groups
224 'children': repo_groups
225 })
225 })
226
226
227 repos = self._get_repo_list(query)
227 repos = self._get_repo_list(query)
228 if repos:
228 if repos:
229 res.append({
229 res.append({
230 'text': _('Repositories'),
230 'text': _('Repositories'),
231 'children': repos
231 'children': repos
232 })
232 })
233
233
234 commits = self._get_hash_commit_list(c.auth_user, query)
234 commits = self._get_hash_commit_list(c.auth_user, query)
235 if commits:
235 if commits:
236 unique_repos = {}
236 unique_repos = {}
237 for commit in commits:
237 for commit in commits:
238 unique_repos.setdefault(commit['obj']['repo'], []
238 unique_repos.setdefault(commit['obj']['repo'], []
239 ).append(commit)
239 ).append(commit)
240
240
241 for repo in unique_repos:
241 for repo in unique_repos:
242 res.append({
242 res.append({
243 'text': _('Commits in %(repo)s') % {'repo': repo},
243 'text': _('Commits in %(repo)s') % {'repo': repo},
244 'children': unique_repos[repo]
244 'children': unique_repos[repo]
245 })
245 })
246
246
247 data = {
247 data = {
248 'more': False,
248 'more': False,
249 'results': res
249 'results': res
250 }
250 }
251 return data
251 return data
252
252
253 def _get_groups_and_repos(self, repo_group_id=None):
253 def _get_groups_and_repos(self, repo_group_id=None):
254 # repo groups groups
254 # repo groups groups
255 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
255 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
256 _perms = ['group.read', 'group.write', 'group.admin']
256 _perms = ['group.read', 'group.write', 'group.admin']
257 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
257 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
258 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
258 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
259 repo_group_list=repo_group_list_acl, admin=False)
259 repo_group_list=repo_group_list_acl, admin=False)
260
260
261 # repositories
261 # repositories
262 repo_list = Repository.get_all_repos(group_id=repo_group_id)
262 repo_list = Repository.get_all_repos(group_id=repo_group_id)
263 _perms = ['repository.read', 'repository.write', 'repository.admin']
263 _perms = ['repository.read', 'repository.write', 'repository.admin']
264 repo_list_acl = RepoList(repo_list, perm_set=_perms)
264 repo_list_acl = RepoList(repo_list, perm_set=_perms)
265 repo_data = RepoModel().get_repos_as_dict(
265 repo_data = RepoModel().get_repos_as_dict(
266 repo_list=repo_list_acl, admin=False)
266 repo_list=repo_list_acl, admin=False)
267
267
268 return repo_data, repo_group_data
268 return repo_data, repo_group_data
269
269
270 @LoginRequired()
270 @LoginRequired()
271 @view_config(
271 @view_config(
272 route_name='home', request_method='GET',
272 route_name='home', request_method='GET',
273 renderer='rhodecode:templates/index.mako')
273 renderer='rhodecode:templates/index.mako')
274 def main_page(self):
274 def main_page(self):
275 c = self.load_default_context()
275 c = self.load_default_context()
276 c.repo_group = None
276 c.repo_group = None
277
277
278 repo_data, repo_group_data = self._get_groups_and_repos()
278 repo_data, repo_group_data = self._get_groups_and_repos()
279 # json used to render the grids
279 # json used to render the grids
280 c.repos_data = json.dumps(repo_data)
280 c.repos_data = json.dumps(repo_data)
281 c.repo_groups_data = json.dumps(repo_group_data)
281 c.repo_groups_data = json.dumps(repo_group_data)
282
282
283 return self._get_template_context(c)
283 return self._get_template_context(c)
284
284
285 @LoginRequired()
285 @LoginRequired()
286 @HasRepoGroupPermissionAnyDecorator(
286 @HasRepoGroupPermissionAnyDecorator(
287 'group.read', 'group.write', 'group.admin')
287 'group.read', 'group.write', 'group.admin')
288 @view_config(
288 @view_config(
289 route_name='repo_group_home', request_method='GET',
289 route_name='repo_group_home', request_method='GET',
290 renderer='rhodecode:templates/index_repo_group.mako')
290 renderer='rhodecode:templates/index_repo_group.mako')
291 @view_config(
291 @view_config(
292 route_name='repo_group_home_slash', request_method='GET',
292 route_name='repo_group_home_slash', request_method='GET',
293 renderer='rhodecode:templates/index_repo_group.mako')
293 renderer='rhodecode:templates/index_repo_group.mako')
294 def repo_group_main_page(self):
294 def repo_group_main_page(self):
295 c = self.load_default_context()
295 c = self.load_default_context()
296 c.repo_group = self.request.db_repo_group
296 c.repo_group = self.request.db_repo_group
297 repo_data, repo_group_data = self._get_groups_and_repos(
297 repo_data, repo_group_data = self._get_groups_and_repos(
298 c.repo_group.group_id)
298 c.repo_group.group_id)
299
299
300 # json used to render the grids
300 # json used to render the grids
301 c.repos_data = json.dumps(repo_data)
301 c.repos_data = json.dumps(repo_data)
302 c.repo_groups_data = json.dumps(repo_group_data)
302 c.repo_groups_data = json.dumps(repo_group_data)
303
303
304 return self._get_template_context(c)
304 return self._get_template_context(c)
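
_get_hash_commit_list() above only queries the full-text searcher when the typed text looks like exactly one hexadecimal commit-hash prefix of at least three characters; anything else is answered with an empty list before a search is issued. The same guard, restated as a standalone helper (the function name is mine):

# Standalone restatement of the hash-prefix guard in _get_hash_commit_list();
# the helper name is illustrative.
import re


def extract_commit_hash_prefix(text):
    if not text or len(text) < 3:
        return None
    candidates = re.compile('([0-9a-f]{2,40})').findall(text)
    if len(candidates) != 1:
        return None
    return candidates[0]


print(extract_commit_hash_prefix('deadbeef'))  # 'deadbeef'
print(extract_commit_hash_prefix('fix bug'))   # None, not a single hex run
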
@@ -1,133 +1,148 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 from rhodecode.apps._base import add_route_with_slash
20
21
21
22
22 def includeme(config):
23 def includeme(config):
23
24
24 # Summary
25 # Summary
25 config.add_route(
26 # NOTE(marcink): one additional route is defined in very bottom, catch
26 name='repo_summary',
27 # all pattern
27 pattern='/{repo_name:.*?[^/]}', repo_route=True)
28
29 config.add_route(
28 config.add_route(
30 name='repo_summary_explicit',
29 name='repo_summary_explicit',
31 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
30 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
31 config.add_route(
32 name='repo_summary_commits',
33 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
34
35 # refs data
36 config.add_route(
37 name='repo_refs_data',
38 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
39
40 config.add_route(
41 name='repo_refs_changelog_data',
42 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
43
44 config.add_route(
45 name='repo_stats',
46 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
32
47
33 # Tags
48 # Tags
34 config.add_route(
49 config.add_route(
35 name='tags_home',
50 name='tags_home',
36 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
51 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
37
52
38 # Branches
53 # Branches
39 config.add_route(
54 config.add_route(
40 name='branches_home',
55 name='branches_home',
41 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
56 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
42
57
43 # Bookmarks
44 config.add_route(
58 config.add_route(
45 name='bookmarks_home',
59 name='bookmarks_home',
46 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
60 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
47
61
48 # Pull Requests
62 # Pull Requests
49 config.add_route(
63 config.add_route(
50 name='pullrequest_show',
64 name='pullrequest_show',
51 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}',
65 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}',
52 repo_route=True)
66 repo_route=True)
53
67
54 config.add_route(
68 config.add_route(
55 name='pullrequest_show_all',
69 name='pullrequest_show_all',
56 pattern='/{repo_name:.*?[^/]}/pull-request',
70 pattern='/{repo_name:.*?[^/]}/pull-request',
57 repo_route=True, repo_accepted_types=['hg', 'git'])
71 repo_route=True, repo_accepted_types=['hg', 'git'])
58
72
59 config.add_route(
73 config.add_route(
60 name='pullrequest_show_all_data',
74 name='pullrequest_show_all_data',
61 pattern='/{repo_name:.*?[^/]}/pull-request-data',
75 pattern='/{repo_name:.*?[^/]}/pull-request-data',
62 repo_route=True, repo_accepted_types=['hg', 'git'])
76 repo_route=True, repo_accepted_types=['hg', 'git'])
63
77
64 # Settings
78 # Settings
65 config.add_route(
79 config.add_route(
66 name='edit_repo',
80 name='edit_repo',
67 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
81 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
68
82
69 # Settings advanced
83 # Settings advanced
70 config.add_route(
84 config.add_route(
71 name='edit_repo_advanced',
85 name='edit_repo_advanced',
72 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
86 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
73 config.add_route(
87 config.add_route(
74 name='edit_repo_advanced_delete',
88 name='edit_repo_advanced_delete',
75 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
89 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
76 config.add_route(
90 config.add_route(
77 name='edit_repo_advanced_locking',
91 name='edit_repo_advanced_locking',
78 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
92 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
79 config.add_route(
93 config.add_route(
80 name='edit_repo_advanced_journal',
94 name='edit_repo_advanced_journal',
81 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
95 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
82 config.add_route(
96 config.add_route(
83 name='edit_repo_advanced_fork',
97 name='edit_repo_advanced_fork',
84 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
98 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
85
99
86 # Caches
100 # Caches
87 config.add_route(
101 config.add_route(
88 name='edit_repo_caches',
102 name='edit_repo_caches',
89 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
103 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
90
104
91 # Permissions
105 # Permissions
92 config.add_route(
106 config.add_route(
93 name='edit_repo_perms',
107 name='edit_repo_perms',
94 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
108 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
95
109
96 # Repo Review Rules
110 # Repo Review Rules
97 config.add_route(
111 config.add_route(
98 name='repo_reviewers',
112 name='repo_reviewers',
99 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
113 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
100
114
101 config.add_route(
115 config.add_route(
102 name='repo_default_reviewers_data',
116 name='repo_default_reviewers_data',
103 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
117 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
104
118
105 # Maintenance
119 # Maintenance
106 config.add_route(
120 config.add_route(
107 name='repo_maintenance',
121 name='repo_maintenance',
108 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
122 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
109
123
110 config.add_route(
124 config.add_route(
111 name='repo_maintenance_execute',
125 name='repo_maintenance_execute',
112 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
126 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
113
127
114 # Strip
128 # Strip
115 config.add_route(
129 config.add_route(
116 name='strip',
130 name='strip',
117 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
131 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
118
132
119 config.add_route(
133 config.add_route(
120 name='strip_check',
134 name='strip_check',
121 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
135 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
122
136
123 config.add_route(
137 config.add_route(
124 name='strip_execute',
138 name='strip_execute',
125 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
139 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
126
140
127 # NOTE(marcink): needs to be at the end for catch-all
141 # NOTE(marcink): needs to be at the end for catch-all
128 # config.add_route(
142 add_route_with_slash(
129 # name='repo_summary',
143 config,
130 # pattern='/{repo_name:.*?[^/]}', repo_route=True)
144 name='repo_summary',
145 pattern='/{repo_name:.*?[^/]}', repo_route=True)
131
146
132 # Scan module for configuration decorators.
147 # Scan module for configuration decorators.
133 config.scan()
148 config.scan()
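# --- Illustrative aside (not part of the diff above) ----------------------
# A minimal sketch of the trailing-slash pattern behind the catch-all
# summary route. It assumes ``add_route_with_slash`` simply registers the
# same pattern a second time with a trailing '/', which may differ from the
# real helper in rhodecode.apps._base; the custom ``repo_route`` argument is
# omitted so the snippet runs on plain Pyramid.
from pyramid.config import Configurator


def add_route_with_slash(config, name, pattern, **kwargs):
    # hypothetical re-implementation, for illustration only
    config.add_route(name, pattern, **kwargs)
    if not pattern.endswith('/'):
        config.add_route(name + '_slash', pattern + '/', **kwargs)


config = Configurator()
add_route_with_slash(
    config, name='repo_summary', pattern='/{repo_name:.*?[^/]}')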
@@ -1,516 +1,494 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.controllers import summary
26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.compat import OrderedDict
28 from rhodecode.lib.compat import OrderedDict
29 from rhodecode.lib.utils2 import AttributeDict
29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.model.db import Repository
31 from rhodecode.model.db import Repository
31 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
32 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
34 from rhodecode.tests import (
35 from rhodecode.tests import assert_session_flash
35 TestController, url, HG_REPO, assert_session_flash)
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
38
38
39
39
40 fixture = Fixture()
40 fixture = Fixture()
41
41
42
42
43 class TestSummaryController(TestController):
43 def route_path(name, params=None, **kwargs):
44 def test_index(self, backend, http_host_only_stub):
44 import urllib
45 self.log_user()
45
46 base_url = {
47 'repo_summary': '/{repo_name}',
48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
49 'repo_refs_data': '/{repo_name}/refs-data',
50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog'
51
52 }[name].format(**kwargs)
53
54 if params:
55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
56 return base_url
57
58
59 @pytest.mark.usefixtures('app')
60 class TestSummaryView(object):
61 def test_index(self, autologin_user, backend, http_host_only_stub):
46 repo_id = backend.repo.repo_id
62 repo_id = backend.repo.repo_id
47 repo_name = backend.repo_name
63 repo_name = backend.repo_name
48 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
64 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
49 return_value=False):
65 return_value=False):
50 response = self.app.get(url('summary_home', repo_name=repo_name))
66 response = self.app.get(
67 route_path('repo_summary', repo_name=repo_name))
51
68
52 # repo type
69 # repo type
53 response.mustcontain(
70 response.mustcontain(
54 '<i class="icon-%s">' % (backend.alias, )
71 '<i class="icon-%s">' % (backend.alias, )
55 )
72 )
56 # public/private
73 # public/private
57 response.mustcontain(
74 response.mustcontain(
58 """<i class="icon-unlock-alt">"""
75 """<i class="icon-unlock-alt">"""
59 )
76 )
60
77
61 # clone url...
78 # clone url...
62 response.mustcontain(
79 response.mustcontain(
63 'id="clone_url" readonly="readonly"'
80 'id="clone_url" readonly="readonly"'
64 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
81 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
65 response.mustcontain(
82 response.mustcontain(
66 'id="clone_url_id" readonly="readonly"'
83 'id="clone_url_id" readonly="readonly"'
67 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
84 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
68
85
69 def test_index_svn_without_proxy(self, backend_svn, http_host_only_stub):
86 def test_index_svn_without_proxy(
70 self.log_user()
87 self, autologin_user, backend_svn, http_host_only_stub):
71 repo_id = backend_svn.repo.repo_id
88 repo_id = backend_svn.repo.repo_id
72 repo_name = backend_svn.repo_name
89 repo_name = backend_svn.repo_name
73 response = self.app.get(url('summary_home', repo_name=repo_name))
90 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
74 # clone url...
91 # clone url...
75 response.mustcontain(
92 response.mustcontain(
76 'id="clone_url" disabled'
93 'id="clone_url" disabled'
77 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
94 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
78 response.mustcontain(
95 response.mustcontain(
79 'id="clone_url_id" disabled'
96 'id="clone_url_id" disabled'
80 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
97 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
81
98
82 def test_index_with_trailing_slash(self, autologin_user, backend,
99 def test_index_with_trailing_slash(
83 http_host_only_stub):
100 self, autologin_user, backend, http_host_only_stub):
101
84 repo_id = backend.repo.repo_id
102 repo_id = backend.repo.repo_id
85 repo_name = backend.repo_name
103 repo_name = backend.repo_name
86 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
104 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
87 return_value=False):
105 return_value=False):
88 response = self.app.get(
106 response = self.app.get(
89 url('summary_home', repo_name=repo_name) + '/',
107 route_path('repo_summary', repo_name=repo_name) + '/',
90 status=200)
108 status=200)
91
109
92 # clone url...
110 # clone url...
93 response.mustcontain(
111 response.mustcontain(
94 'id="clone_url" readonly="readonly"'
112 'id="clone_url" readonly="readonly"'
95 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
113 ' value="http://test_admin@%s/%s"' % (http_host_only_stub, repo_name, ))
96 response.mustcontain(
114 response.mustcontain(
97 'id="clone_url_id" readonly="readonly"'
115 'id="clone_url_id" readonly="readonly"'
98 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
116 ' value="http://test_admin@%s/_%s"' % (http_host_only_stub, repo_id, ))
99
117
100 def test_index_by_id(self, backend):
118 def test_index_by_id(self, autologin_user, backend):
101 self.log_user()
102 repo_id = backend.repo.repo_id
119 repo_id = backend.repo.repo_id
103 response = self.app.get(url(
120 response = self.app.get(
104 'summary_home', repo_name='_%s' % (repo_id,)))
121 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
105
122
106 # repo type
123 # repo type
107 response.mustcontain(
124 response.mustcontain(
108 '<i class="icon-%s">' % (backend.alias, )
125 '<i class="icon-%s">' % (backend.alias, )
109 )
126 )
110 # public/private
127 # public/private
111 response.mustcontain(
128 response.mustcontain(
112 """<i class="icon-unlock-alt">"""
129 """<i class="icon-unlock-alt">"""
113 )
130 )
114
131
115 def test_index_by_repo_having_id_path_in_name_hg(self):
132 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
116 self.log_user()
117 fixture.create_repo(name='repo_1')
133 fixture.create_repo(name='repo_1')
118 response = self.app.get(url('summary_home', repo_name='repo_1'))
134 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
119
135
120 try:
136 try:
121 response.mustcontain("repo_1")
137 response.mustcontain("repo_1")
122 finally:
138 finally:
123 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
139 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
124 Session().commit()
140 Session().commit()
125
141
126 def test_index_with_anonymous_access_disabled(self):
142 def test_index_with_anonymous_access_disabled(
127 with fixture.anon_access(False):
143 self, backend, disable_anonymous_user):
128 response = self.app.get(url('summary_home', repo_name=HG_REPO),
144 response = self.app.get(
129 status=302)
145 route_path('repo_summary', repo_name=backend.repo_name), status=302)
130 assert 'login' in response.location
146 assert 'login' in response.location
131
147
132 def _enable_stats(self, repo):
148 def _enable_stats(self, repo):
133 r = Repository.get_by_repo_name(repo)
149 r = Repository.get_by_repo_name(repo)
134 r.enable_statistics = True
150 r.enable_statistics = True
135 Session().add(r)
151 Session().add(r)
136 Session().commit()
152 Session().commit()
137
153
138 expected_trending = {
154 expected_trending = {
139 'hg': {
155 'hg': {
140 "py": {"count": 68, "desc": ["Python"]},
156 "py": {"count": 68, "desc": ["Python"]},
141 "rst": {"count": 16, "desc": ["Rst"]},
157 "rst": {"count": 16, "desc": ["Rst"]},
142 "css": {"count": 2, "desc": ["Css"]},
158 "css": {"count": 2, "desc": ["Css"]},
143 "sh": {"count": 2, "desc": ["Bash"]},
159 "sh": {"count": 2, "desc": ["Bash"]},
144 "bat": {"count": 1, "desc": ["Batch"]},
160 "bat": {"count": 1, "desc": ["Batch"]},
145 "cfg": {"count": 1, "desc": ["Ini"]},
161 "cfg": {"count": 1, "desc": ["Ini"]},
146 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
162 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
147 "ini": {"count": 1, "desc": ["Ini"]},
163 "ini": {"count": 1, "desc": ["Ini"]},
148 "js": {"count": 1, "desc": ["Javascript"]},
164 "js": {"count": 1, "desc": ["Javascript"]},
149 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
165 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
150 },
166 },
151 'git': {
167 'git': {
152 "py": {"count": 68, "desc": ["Python"]},
168 "py": {"count": 68, "desc": ["Python"]},
153 "rst": {"count": 16, "desc": ["Rst"]},
169 "rst": {"count": 16, "desc": ["Rst"]},
154 "css": {"count": 2, "desc": ["Css"]},
170 "css": {"count": 2, "desc": ["Css"]},
155 "sh": {"count": 2, "desc": ["Bash"]},
171 "sh": {"count": 2, "desc": ["Bash"]},
156 "bat": {"count": 1, "desc": ["Batch"]},
172 "bat": {"count": 1, "desc": ["Batch"]},
157 "cfg": {"count": 1, "desc": ["Ini"]},
173 "cfg": {"count": 1, "desc": ["Ini"]},
158 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
174 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
159 "ini": {"count": 1, "desc": ["Ini"]},
175 "ini": {"count": 1, "desc": ["Ini"]},
160 "js": {"count": 1, "desc": ["Javascript"]},
176 "js": {"count": 1, "desc": ["Javascript"]},
161 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
177 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
162 },
178 },
163 'svn': {
179 'svn': {
164 "py": {"count": 75, "desc": ["Python"]},
180 "py": {"count": 75, "desc": ["Python"]},
165 "rst": {"count": 16, "desc": ["Rst"]},
181 "rst": {"count": 16, "desc": ["Rst"]},
166 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
182 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
167 "css": {"count": 2, "desc": ["Css"]},
183 "css": {"count": 2, "desc": ["Css"]},
168 "bat": {"count": 1, "desc": ["Batch"]},
184 "bat": {"count": 1, "desc": ["Batch"]},
169 "cfg": {"count": 1, "desc": ["Ini"]},
185 "cfg": {"count": 1, "desc": ["Ini"]},
170 "ini": {"count": 1, "desc": ["Ini"]},
186 "ini": {"count": 1, "desc": ["Ini"]},
171 "js": {"count": 1, "desc": ["Javascript"]},
187 "js": {"count": 1, "desc": ["Javascript"]},
172 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
188 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
173 "sh": {"count": 1, "desc": ["Bash"]}
189 "sh": {"count": 1, "desc": ["Bash"]}
174 },
190 },
175 }
191 }
176
192
177 def test_repo_stats(self, backend, xhr_header):
193 def test_repo_stats(self, autologin_user, backend, xhr_header):
178 self.log_user()
179 response = self.app.get(
194 response = self.app.get(
180 url('repo_stats',
195 route_path(
181 repo_name=backend.repo_name, commit_id='tip'),
196 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
182 extra_environ=xhr_header,
197 extra_environ=xhr_header,
183 status=200)
198 status=200)
184 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
199 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
185
200
186 def test_repo_stats_code_stats_enabled(self, backend, xhr_header):
201 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
187 self.log_user()
188 repo_name = backend.repo_name
202 repo_name = backend.repo_name
189
203
190 # codes stats
204 # codes stats
191 self._enable_stats(repo_name)
205 self._enable_stats(repo_name)
192 ScmModel().mark_for_invalidation(repo_name)
206 ScmModel().mark_for_invalidation(repo_name)
193
207
194 response = self.app.get(
208 response = self.app.get(
195 url('repo_stats',
209 route_path(
196 repo_name=backend.repo_name, commit_id='tip'),
210 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
197 extra_environ=xhr_header,
211 extra_environ=xhr_header,
198 status=200)
212 status=200)
199
213
200 expected_data = self.expected_trending[backend.alias]
214 expected_data = self.expected_trending[backend.alias]
201 returned_stats = response.json['code_stats']
215 returned_stats = response.json['code_stats']
202 for k, v in expected_data.items():
216 for k, v in expected_data.items():
203 assert v == returned_stats[k]
217 assert v == returned_stats[k]
204
218
205 def test_repo_refs_data(self, backend):
219 def test_repo_refs_data(self, backend):
206 response = self.app.get(
220 response = self.app.get(
207 url('repo_refs_data', repo_name=backend.repo_name),
221 route_path('repo_refs_data', repo_name=backend.repo_name),
208 status=200)
222 status=200)
209
223
210 # Ensure that there is the correct amount of items in the result
224 # Ensure that there is the correct amount of items in the result
211 repo = backend.repo.scm_instance()
225 repo = backend.repo.scm_instance()
212 data = response.json['results']
226 data = response.json['results']
213 items = sum(len(section['children']) for section in data)
227 items = sum(len(section['children']) for section in data)
214 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
228 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
215 assert items == repo_refs
229 assert items == repo_refs
216
230
217 def test_index_shows_missing_requirements_message(
231 def test_index_shows_missing_requirements_message(
218 self, backend, autologin_user):
232 self, backend, autologin_user):
219 repo_name = backend.repo_name
233 repo_name = backend.repo_name
220 scm_patcher = mock.patch.object(
234 scm_patcher = mock.patch.object(
221 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
235 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
222
236
223 with scm_patcher:
237 with scm_patcher:
224 response = self.app.get(url('summary_home', repo_name=repo_name))
238 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
225 assert_response = AssertResponse(response)
239 assert_response = AssertResponse(response)
226 assert_response.element_contains(
240 assert_response.element_contains(
227 '.main .alert-warning strong', 'Missing requirements')
241 '.main .alert-warning strong', 'Missing requirements')
228 assert_response.element_contains(
242 assert_response.element_contains(
229 '.main .alert-warning',
243 '.main .alert-warning',
230 'These commits cannot be displayed, because this repository'
244 'Commits cannot be displayed, because this repository '
231 ' uses the Mercurial largefiles extension, which was not enabled.')
245 'uses one or more extensions, which was not enabled.')
232
246
233 def test_missing_requirements_page_does_not_contains_switch_to(
247 def test_missing_requirements_page_does_not_contains_switch_to(
234 self, backend):
248 self, autologin_user, backend):
235 self.log_user()
236 repo_name = backend.repo_name
249 repo_name = backend.repo_name
237 scm_patcher = mock.patch.object(
250 scm_patcher = mock.patch.object(
238 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
239
252
240 with scm_patcher:
253 with scm_patcher:
241 response = self.app.get(url('summary_home', repo_name=repo_name))
254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
242 response.mustcontain(no='Switch To')
255 response.mustcontain(no='Switch To')
243
256
244
257
245 @pytest.mark.usefixtures('pylonsapp')
258 @pytest.mark.usefixtures('app')
246 class TestSwitcherReferenceData:
259 class TestRepoLocation(object):
247
260
248 def test_creates_reference_urls_based_on_name(self):
261 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
249 references = {
262 def test_manual_delete(self, autologin_user, backend, suffix, csrf_token):
250 'name': 'commit_id',
263 repo = backend.create_repo(name_suffix=suffix)
251 }
264 repo_name = repo.repo_name
252 controller = summary.SummaryController()
265
253 is_svn = False
266 # delete from file system
254 result = controller._switcher_reference_data(
267 RepoModel()._delete_filesystem_repo(repo)
255 'repo_name', references, is_svn)
256 expected_url = h.url(
257 'files_home', repo_name='repo_name', revision='name',
258 at='name')
259 assert result[0]['files_url'] == expected_url
260
268
261 def test_urls_contain_commit_id_if_slash_in_name(self):
269 # test if the repo is still in the database
262 references = {
270 new_repo = RepoModel().get_by_repo_name(repo_name)
263 'name/with/slash': 'commit_id',
271 assert new_repo.repo_name == repo_name
264 }
265 controller = summary.SummaryController()
266 is_svn = False
267 result = controller._switcher_reference_data(
268 'repo_name', references, is_svn)
269 expected_url = h.url(
270 'files_home', repo_name='repo_name', revision='commit_id',
271 at='name/with/slash')
272 assert result[0]['files_url'] == expected_url
273
272
274 def test_adds_reference_to_path_for_svn(self):
273 # check if repo is not in the filesystem
275 references = {
274 assert not repo_on_filesystem(repo_name)
276 'name/with/slash': 'commit_id',
275 self.assert_repo_not_found_redirect(repo_name)
277 }
276
278 controller = summary.SummaryController()
277 def assert_repo_not_found_redirect(self, repo_name):
279 is_svn = True
278 # run the check page that triggers the other flash message
280 result = controller._switcher_reference_data(
279 response = self.app.get(h.url('repo_check_home', repo_name=repo_name))
281 'repo_name', references, is_svn)
280 assert_session_flash(
282 expected_url = h.url(
281 response, 'The repository at %s cannot be located.' % repo_name)
283 'files_home', repo_name='repo_name', f_path='name/with/slash',
284 revision='commit_id', at='name/with/slash')
285 assert result[0]['files_url'] == expected_url
286
282
287
283
288 @pytest.mark.usefixtures('pylonsapp')
284 @pytest.fixture()
289 class TestCreateReferenceData:
285 def summary_view(context_stub, request_stub, user_util):
286 """
287 Bootstrap view to test the view functions
288 """
289 request_stub.matched_route = AttributeDict(name='test_view')
290
291 request_stub.user = user_util.create_user().AuthUser
292 request_stub.db_repo = user_util.create_repo()
293
294 view = RepoSummaryView(context=context_stub, request=request_stub)
295 return view
296
297
298 @pytest.mark.usefixtures('app')
299 class TestCreateReferenceData(object):
290
300
291 @pytest.fixture
301 @pytest.fixture
292 def example_refs(self):
302 def example_refs(self):
293 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
303 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
294 example_refs = [
304 example_refs = [
295 ('section_1', section_1_refs, 't1'),
305 ('section_1', section_1_refs, 't1'),
296 ('section_2', {'c': 'c_id'}, 't2'),
306 ('section_2', {'c': 'c_id'}, 't2'),
297 ]
307 ]
298 return example_refs
308 return example_refs
299
309
300 def test_generates_refs_based_on_commit_ids(self, example_refs):
310 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
301 repo = mock.Mock()
311 repo = mock.Mock()
302 repo.name = 'test-repo'
312 repo.name = 'test-repo'
303 repo.alias = 'git'
313 repo.alias = 'git'
304 full_repo_name = 'pytest-repo-group/' + repo.name
314 full_repo_name = 'pytest-repo-group/' + repo.name
305 controller = summary.SummaryController()
306
315
307 result = controller._create_reference_data(
316 result = summary_view._create_reference_data(
308 repo, full_repo_name, example_refs)
317 repo, full_repo_name, example_refs)
309
318
310 expected_files_url = '/{}/files/'.format(full_repo_name)
319 expected_files_url = '/{}/files/'.format(full_repo_name)
311 expected_result = [
320 expected_result = [
312 {
321 {
313 'children': [
322 'children': [
314 {
323 {
315 'id': 'a', 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
324 'id': 'a', 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
316 'files_url': expected_files_url + 'a/?at=a',
325 'files_url': expected_files_url + 'a/?at=a',
317 },
326 },
318 {
327 {
319 'id': 'b', 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
328 'id': 'b', 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
320 'files_url': expected_files_url + 'b/?at=b',
329 'files_url': expected_files_url + 'b/?at=b',
321 }
330 }
322 ],
331 ],
323 'text': 'section_1'
332 'text': 'section_1'
324 },
333 },
325 {
334 {
326 'children': [
335 'children': [
327 {
336 {
328 'id': 'c', 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
337 'id': 'c', 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
329 'files_url': expected_files_url + 'c/?at=c',
338 'files_url': expected_files_url + 'c/?at=c',
330 }
339 }
331 ],
340 ],
332 'text': 'section_2'
341 'text': 'section_2'
333 }]
342 }]
334 assert result == expected_result
343 assert result == expected_result
335
344
336 def test_generates_refs_with_path_for_svn(self, example_refs):
345 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
337 repo = mock.Mock()
346 repo = mock.Mock()
338 repo.name = 'test-repo'
347 repo.name = 'test-repo'
339 repo.alias = 'svn'
348 repo.alias = 'svn'
340 full_repo_name = 'pytest-repo-group/' + repo.name
349 full_repo_name = 'pytest-repo-group/' + repo.name
341 controller = summary.SummaryController()
350
342 result = controller._create_reference_data(
351 result = summary_view._create_reference_data(
343 repo, full_repo_name, example_refs)
352 repo, full_repo_name, example_refs)
344
353
345 expected_files_url = '/{}/files/'.format(full_repo_name)
354 expected_files_url = '/{}/files/'.format(full_repo_name)
346 expected_result = [
355 expected_result = [
347 {
356 {
348 'children': [
357 'children': [
349 {
358 {
350 'id': 'a@a_id', 'raw_id': 'a_id',
359 'id': 'a@a_id', 'raw_id': 'a_id',
351 'text': 'a', 'type': 't1',
360 'text': 'a', 'type': 't1',
352 'files_url': expected_files_url + 'a_id/a?at=a',
361 'files_url': expected_files_url + 'a_id/a?at=a',
353 },
362 },
354 {
363 {
355 'id': 'b@b_id', 'raw_id': 'b_id',
364 'id': 'b@b_id', 'raw_id': 'b_id',
356 'text': 'b', 'type': 't1',
365 'text': 'b', 'type': 't1',
357 'files_url': expected_files_url + 'b_id/b?at=b',
366 'files_url': expected_files_url + 'b_id/b?at=b',
358 }
367 }
359 ],
368 ],
360 'text': 'section_1'
369 'text': 'section_1'
361 },
370 },
362 {
371 {
363 'children': [
372 'children': [
364 {
373 {
365 'id': 'c@c_id', 'raw_id': 'c_id',
374 'id': 'c@c_id', 'raw_id': 'c_id',
366 'text': 'c', 'type': 't2',
375 'text': 'c', 'type': 't2',
367 'files_url': expected_files_url + 'c_id/c?at=c',
376 'files_url': expected_files_url + 'c_id/c?at=c',
368 }
377 }
369 ],
378 ],
370 'text': 'section_2'
379 'text': 'section_2'
371 }
380 }
372 ]
381 ]
373 assert result == expected_result
382 assert result == expected_result
374
383
375
384
376 @pytest.mark.usefixtures("app")
385 class TestCreateFilesUrl(object):
377 class TestRepoLocation:
378
379 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
380 def test_manual_delete(self, autologin_user, backend, suffix, csrf_token):
381 repo = backend.create_repo(name_suffix=suffix)
382 repo_name = repo.repo_name
383
384 # delete from file system
385 RepoModel()._delete_filesystem_repo(repo)
386
387 # test if the repo is still in the database
388 new_repo = RepoModel().get_by_repo_name(repo_name)
389 assert new_repo.repo_name == repo_name
390
386
391 # check if repo is not in the filesystem
387 def test_creates_non_svn_url(self, summary_view):
392 assert not repo_on_filesystem(repo_name)
393 self.assert_repo_not_found_redirect(repo_name)
394
395 def assert_repo_not_found_redirect(self, repo_name):
396 # run the check page that triggers the other flash message
397 response = self.app.get(url('repo_check_home', repo_name=repo_name))
398 assert_session_flash(
399 response, 'The repository at %s cannot be located.' % repo_name)
400
401
402 class TestCreateFilesUrl(object):
403 def test_creates_non_svn_url(self):
404 controller = summary.SummaryController()
405 repo = mock.Mock()
388 repo = mock.Mock()
406 repo.name = 'abcde'
389 repo.name = 'abcde'
407 full_repo_name = 'test-repo-group/' + repo.name
390 full_repo_name = 'test-repo-group/' + repo.name
408 ref_name = 'branch1'
391 ref_name = 'branch1'
409 raw_id = 'deadbeef0123456789'
392 raw_id = 'deadbeef0123456789'
410 is_svn = False
393 is_svn = False
411
394
412 with mock.patch.object(summary.h, 'url') as url_mock:
395 with mock.patch('rhodecode.lib.helpers.url') as url_mock:
413 result = controller._create_files_url(
396 result = summary_view._create_files_url(
414 repo, full_repo_name, ref_name, raw_id, is_svn)
397 repo, full_repo_name, ref_name, raw_id, is_svn)
415 url_mock.assert_called_once_with(
398 url_mock.assert_called_once_with(
416 'files_home', repo_name=full_repo_name, f_path='',
399 'files_home', repo_name=full_repo_name, f_path='',
417 revision=ref_name, at=ref_name)
400 revision=ref_name, at=ref_name)
418 assert result == url_mock.return_value
401 assert result == url_mock.return_value
419
402
420 def test_creates_svn_url(self):
403 def test_creates_svn_url(self, summary_view):
421 controller = summary.SummaryController()
422 repo = mock.Mock()
404 repo = mock.Mock()
423 repo.name = 'abcde'
405 repo.name = 'abcde'
424 full_repo_name = 'test-repo-group/' + repo.name
406 full_repo_name = 'test-repo-group/' + repo.name
425 ref_name = 'branch1'
407 ref_name = 'branch1'
426 raw_id = 'deadbeef0123456789'
408 raw_id = 'deadbeef0123456789'
427 is_svn = True
409 is_svn = True
428
410
429 with mock.patch.object(summary.h, 'url') as url_mock:
411 with mock.patch('rhodecode.lib.helpers.url') as url_mock:
430 result = controller._create_files_url(
412 result = summary_view._create_files_url(
431 repo, full_repo_name, ref_name, raw_id, is_svn)
413 repo, full_repo_name, ref_name, raw_id, is_svn)
432 url_mock.assert_called_once_with(
414 url_mock.assert_called_once_with(
433 'files_home', repo_name=full_repo_name, f_path=ref_name,
415 'files_home', repo_name=full_repo_name, f_path=ref_name,
434 revision=raw_id, at=ref_name)
416 revision=raw_id, at=ref_name)
435 assert result == url_mock.return_value
417 assert result == url_mock.return_value
436
418
437 def test_name_has_slashes(self):
419 def test_name_has_slashes(self, summary_view):
438 controller = summary.SummaryController()
439 repo = mock.Mock()
420 repo = mock.Mock()
440 repo.name = 'abcde'
421 repo.name = 'abcde'
441 full_repo_name = 'test-repo-group/' + repo.name
422 full_repo_name = 'test-repo-group/' + repo.name
442 ref_name = 'branch1/branch2'
423 ref_name = 'branch1/branch2'
443 raw_id = 'deadbeef0123456789'
424 raw_id = 'deadbeef0123456789'
444 is_svn = False
425 is_svn = False
445
426
446 with mock.patch.object(summary.h, 'url') as url_mock:
427 with mock.patch('rhodecode.lib.helpers.url') as url_mock:
447 result = controller._create_files_url(
428 result = summary_view._create_files_url(
448 repo, full_repo_name, ref_name, raw_id, is_svn)
429 repo, full_repo_name, ref_name, raw_id, is_svn)
449 url_mock.assert_called_once_with(
430 url_mock.assert_called_once_with(
450 'files_home', repo_name=full_repo_name, f_path='', revision=raw_id,
431 'files_home', repo_name=full_repo_name, f_path='', revision=raw_id,
451 at=ref_name)
432 at=ref_name)
452 assert result == url_mock.return_value
433 assert result == url_mock.return_value
453
434
454
435
455 class TestReferenceItems(object):
436 class TestReferenceItems(object):
456 repo = mock.Mock()
437 repo = mock.Mock()
457 repo.name = 'pytest-repo'
438 repo.name = 'pytest-repo'
458 repo_full_name = 'pytest-repo-group/' + repo.name
439 repo_full_name = 'pytest-repo-group/' + repo.name
459 ref_type = 'branch'
440 ref_type = 'branch'
460 fake_url = '/abcde/'
441 fake_url = '/abcde/'
461
442
462 @staticmethod
443 @staticmethod
463 def _format_function(name, id_):
444 def _format_function(name, id_):
464 return 'format_function_{}_{}'.format(name, id_)
445 return 'format_function_{}_{}'.format(name, id_)
465
446
466 def test_creates_required_amount_of_items(self):
447 def test_creates_required_amount_of_items(self, summary_view):
467 amount = 100
448 amount = 100
468 refs = {
449 refs = {
469 'ref{}'.format(i): '{0:040d}'.format(i)
450 'ref{}'.format(i): '{0:040d}'.format(i)
470 for i in range(amount)
451 for i in range(amount)
471 }
452 }
472
453
473 controller = summary.SummaryController()
454 url_patcher = mock.patch.object(summary_view, '_create_files_url')
474
455 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
475 url_patcher = mock.patch.object(
456 return_value=False)
476 controller, '_create_files_url')
477 svn_patcher = mock.patch.object(
478 summary.h, 'is_svn', return_value=False)
479
457
480 with url_patcher as url_mock, svn_patcher:
458 with url_patcher as url_mock, svn_patcher:
481 result = controller._create_reference_items(
459 result = summary_view._create_reference_items(
482 self.repo, self.repo_full_name, refs, self.ref_type,
460 self.repo, self.repo_full_name, refs, self.ref_type,
483 self._format_function)
461 self._format_function)
484 assert len(result) == amount
462 assert len(result) == amount
485 assert url_mock.call_count == amount
463 assert url_mock.call_count == amount
486
464
487 def test_single_item_details(self):
465 def test_single_item_details(self, summary_view):
488 ref_name = 'ref1'
466 ref_name = 'ref1'
489 ref_id = 'deadbeef'
467 ref_id = 'deadbeef'
490 refs = {
468 refs = {
491 ref_name: ref_id
469 ref_name: ref_id
492 }
470 }
493
471
494 controller = summary.SummaryController()
472 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
473 return_value=False)
474
495 url_patcher = mock.patch.object(
475 url_patcher = mock.patch.object(
496 controller, '_create_files_url', return_value=self.fake_url)
476 summary_view, '_create_files_url', return_value=self.fake_url)
497 svn_patcher = mock.patch.object(
498 summary.h, 'is_svn', return_value=False)
499
477
500 with url_patcher as url_mock, svn_patcher:
478 with url_patcher as url_mock, svn_patcher:
501 result = controller._create_reference_items(
479 result = summary_view._create_reference_items(
502 self.repo, self.repo_full_name, refs, self.ref_type,
480 self.repo, self.repo_full_name, refs, self.ref_type,
503 self._format_function)
481 self._format_function)
504
482
505 url_mock.assert_called_once_with(
483 url_mock.assert_called_once_with(
506 self.repo, self.repo_full_name, ref_name, ref_id, False)
484 self.repo, self.repo_full_name, ref_name, ref_id, False)
507 expected_result = [
485 expected_result = [
508 {
486 {
509 'text': ref_name,
487 'text': ref_name,
510 'id': self._format_function(ref_name, ref_id),
488 'id': self._format_function(ref_name, ref_id),
511 'raw_id': ref_id,
489 'raw_id': ref_id,
512 'type': self.ref_type,
490 'type': self.ref_type,
513 'files_url': self.fake_url
491 'files_url': self.fake_url
514 }
492 }
515 ]
493 ]
516 assert result == expected_result
494 assert result == expected_result
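# --- Illustrative aside (not part of the diff above) ----------------------
# Usage sketch for the route_path() helper defined at the top of the test
# module above: it replaces the old pylons url() calls by formatting a small
# name -> pattern map. urllib.urlencode assumes Python 2, matching the
# codebase; on Python 3 it lives in urllib.parse.
import urllib

_base_url = {
    'repo_summary': '/{repo_name}',
    'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
}


def route_path(name, params=None, **kwargs):
    url = _base_url[name].format(**kwargs)
    if params:
        url = '{}?{}'.format(url, urllib.urlencode(params))
    return url


assert route_path('repo_summary', repo_name='some/repo') == '/some/repo'
assert route_path(
    'repo_stats', repo_name='repo', commit_id='tip',
    params={'annotate': 1}) == '/repo/repo_stats/tip?annotate=1'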
@@ -1,516 +1,521 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons middleware initialization
22 Pylons middleware initialization
23 """
23 """
24 import logging
24 import logging
25 from collections import OrderedDict
25 from collections import OrderedDict
26
26
27 from paste.registry import RegistryManager
27 from paste.registry import RegistryManager
28 from paste.gzipper import make_gzip_middleware
28 from paste.gzipper import make_gzip_middleware
29 from pylons.wsgiapp import PylonsApp
29 from pylons.wsgiapp import PylonsApp
30 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid.config import Configurator
31 from pyramid.config import Configurator
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.httpexceptions import (
34 from pyramid.httpexceptions import (
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
36 from pyramid.events import ApplicationCreated
36 from pyramid.events import ApplicationCreated
37 from pyramid.renderers import render_to_response
37 from pyramid.renderers import render_to_response
38 from routes.middleware import RoutesMiddleware
38 from routes.middleware import RoutesMiddleware
39 import routes.util
39 import routes.util
40
40
41 import rhodecode
41 import rhodecode
42
42 from rhodecode.model import meta
43 from rhodecode.model import meta
43 from rhodecode.config import patches
44 from rhodecode.config import patches
44 from rhodecode.config.routing import STATIC_FILE_PREFIX
45 from rhodecode.config.routing import STATIC_FILE_PREFIX
45 from rhodecode.config.environment import (
46 from rhodecode.config.environment import (
46 load_environment, load_pyramid_environment)
47 load_environment, load_pyramid_environment)
48
49 from rhodecode.lib.vcs import VCSCommunicationError
50 from rhodecode.lib.exceptions import VCSServerUnavailable
47 from rhodecode.lib.middleware import csrf
51 from rhodecode.lib.middleware import csrf
48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
52 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
49 from rhodecode.lib.middleware.error_handling import (
53 from rhodecode.lib.middleware.error_handling import (
50 PylonsErrorHandlingMiddleware)
54 PylonsErrorHandlingMiddleware)
51 from rhodecode.lib.middleware.https_fixup import HttpsFixup
55 from rhodecode.lib.middleware.https_fixup import HttpsFixup
52 from rhodecode.lib.middleware.vcs import VCSMiddleware
56 from rhodecode.lib.middleware.vcs import VCSMiddleware
53 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
57 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
54 from rhodecode.lib.utils2 import aslist as rhodecode_aslist
58 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
55 from rhodecode.subscribers import (
59 from rhodecode.subscribers import (
56 scan_repositories_if_enabled, write_js_routes_if_enabled,
60 scan_repositories_if_enabled, write_js_routes_if_enabled,
57 write_metadata_if_needed)
61 write_metadata_if_needed)
58
62
59
63
60 log = logging.getLogger(__name__)
64 log = logging.getLogger(__name__)
61
65
62
66
63 # this is used to avoid the route lookup overhead in routesmiddleware
67 # this is used to avoid the route lookup overhead in routesmiddleware
64 # for certain routes which won't go to pylons - e.g. static files, debugger
68 # for certain routes which won't go to pylons - e.g. static files, debugger
65 # it is only needed for the pylons migration and can be removed once complete
69 # it is only needed for the pylons migration and can be removed once complete
66 class SkippableRoutesMiddleware(RoutesMiddleware):
70 class SkippableRoutesMiddleware(RoutesMiddleware):
67 """ Routes middleware that allows you to skip prefixes """
71 """ Routes middleware that allows you to skip prefixes """
68
72
69 def __init__(self, *args, **kw):
73 def __init__(self, *args, **kw):
70 self.skip_prefixes = kw.pop('skip_prefixes', [])
74 self.skip_prefixes = kw.pop('skip_prefixes', [])
71 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
75 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
72
76
73 def __call__(self, environ, start_response):
77 def __call__(self, environ, start_response):
74 for prefix in self.skip_prefixes:
78 for prefix in self.skip_prefixes:
75 if environ['PATH_INFO'].startswith(prefix):
79 if environ['PATH_INFO'].startswith(prefix):
76 # added to avoid the case when a missing /_static route falls
80 # added to avoid the case when a missing /_static route falls
77 # through to pylons and causes an exception as pylons is
81 # through to pylons and causes an exception as pylons is
78 # expecting wsgiorg.routing_args to be set in the environ
82 # expecting wsgiorg.routing_args to be set in the environ
79 # by RoutesMiddleware.
83 # by RoutesMiddleware.
80 if 'wsgiorg.routing_args' not in environ:
84 if 'wsgiorg.routing_args' not in environ:
81 environ['wsgiorg.routing_args'] = (None, {})
85 environ['wsgiorg.routing_args'] = (None, {})
82 return self.app(environ, start_response)
86 return self.app(environ, start_response)
83
87
84 return super(SkippableRoutesMiddleware, self).__call__(
88 return super(SkippableRoutesMiddleware, self).__call__(
85 environ, start_response)
89 environ, start_response)
86
90
87
91
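# --- Illustrative aside (not part of the diff above) ----------------------
# Usage sketch for SkippableRoutesMiddleware defined above: requests whose
# path starts with one of the skip_prefixes bypass the Routes mapper and go
# straight to the wrapped app. The prefixes and the dummy app are examples
# only, not the exact values RhodeCode wires in.
from routes import Mapper


def dummy_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['ok\n']


wrapped = SkippableRoutesMiddleware(
    dummy_app, Mapper(), skip_prefixes=['/_static', '/_debug_toolbar'])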
88 def make_app(global_conf, static_files=True, **app_conf):
92 def make_app(global_conf, static_files=True, **app_conf):
89 """Create a Pylons WSGI application and return it
93 """Create a Pylons WSGI application and return it
90
94
91 ``global_conf``
95 ``global_conf``
92 The inherited configuration for this application. Normally from
96 The inherited configuration for this application. Normally from
93 the [DEFAULT] section of the Paste ini file.
97 the [DEFAULT] section of the Paste ini file.
94
98
95 ``app_conf``
99 ``app_conf``
96 The application's local configuration. Normally specified in
100 The application's local configuration. Normally specified in
97 the [app:<name>] section of the Paste ini file (where <name>
101 the [app:<name>] section of the Paste ini file (where <name>
98 defaults to main).
102 defaults to main).
99
103
100 """
104 """
101 # Apply compatibility patches
105 # Apply compatibility patches
102 patches.kombu_1_5_1_python_2_7_11()
106 patches.kombu_1_5_1_python_2_7_11()
103 patches.inspect_getargspec()
107 patches.inspect_getargspec()
104
108
105 # Configure the Pylons environment
109 # Configure the Pylons environment
106 config = load_environment(global_conf, app_conf)
110 config = load_environment(global_conf, app_conf)
107
111
108 # The Pylons WSGI app
112 # The Pylons WSGI app
109 app = PylonsApp(config=config)
113 app = PylonsApp(config=config)
110 if rhodecode.is_test:
114 if rhodecode.is_test:
111 app = csrf.CSRFDetector(app)
115 app = csrf.CSRFDetector(app)
112
116
113 expected_origin = config.get('expected_origin')
117 expected_origin = config.get('expected_origin')
114 if expected_origin:
118 if expected_origin:
115 # The API can be accessed from other Origins.
119 # The API can be accessed from other Origins.
116 app = csrf.OriginChecker(app, expected_origin,
120 app = csrf.OriginChecker(app, expected_origin,
117 skip_urls=[routes.util.url_for('api')])
121 skip_urls=[routes.util.url_for('api')])
118
122
119 # Establish the Registry for this application
123 # Establish the Registry for this application
120 app = RegistryManager(app)
124 app = RegistryManager(app)
121
125
122 app.config = config
126 app.config = config
123
127
124 return app
128 return app
125
129
126
130
127 def make_pyramid_app(global_config, **settings):
131 def make_pyramid_app(global_config, **settings):
128 """
132 """
129 Constructs the WSGI application based on Pyramid and wraps the Pylons based
133 Constructs the WSGI application based on Pyramid and wraps the Pylons based
130 application.
134 application.
131
135
132 Specials:
136 Specials:
133
137
134 * We migrate from Pylons to Pyramid. While doing this, we keep both
138 * We migrate from Pylons to Pyramid. While doing this, we keep both
135 frameworks functional. This involves moving some WSGI middlewares around
139 frameworks functional. This involves moving some WSGI middlewares around
136 and providing access to some data internals, so that the old code is
140 and providing access to some data internals, so that the old code is
137 still functional.
141 still functional.
138
142
139 * The application can also be integrated like a plugin via the call to
143 * The application can also be integrated like a plugin via the call to
140 `includeme`. This is accompanied with the other utility functions which
144 `includeme`. This is accompanied with the other utility functions which
141 are called. Changing this should be done with great care to not break
145 are called. Changing this should be done with great care to not break
142 cases when these fragments are assembled from another place.
146 cases when these fragments are assembled from another place.
143
147
144 """
148 """
145 # The edition string should be available in pylons too, so we add it here
149 # The edition string should be available in pylons too, so we add it here
146 # before copying the settings.
150 # before copying the settings.
147 settings.setdefault('rhodecode.edition', 'Community Edition')
151 settings.setdefault('rhodecode.edition', 'Community Edition')
148
152
149 # As long as our Pylons application does expect "unprepared" settings, make
153 # As long as our Pylons application does expect "unprepared" settings, make
150 # sure that we keep an unmodified copy. This avoids unintentional change of
154 # sure that we keep an unmodified copy. This avoids unintentional change of
151 # behavior in the old application.
155 # behavior in the old application.
152 settings_pylons = settings.copy()
156 settings_pylons = settings.copy()
153
157
154 sanitize_settings_and_apply_defaults(settings)
158 sanitize_settings_and_apply_defaults(settings)
155 config = Configurator(settings=settings)
159 config = Configurator(settings=settings)
156 add_pylons_compat_data(config.registry, global_config, settings_pylons)
160 add_pylons_compat_data(config.registry, global_config, settings_pylons)
157
161
158 load_pyramid_environment(global_config, settings)
162 load_pyramid_environment(global_config, settings)
159
163
160 includeme_first(config)
164 includeme_first(config)
161 includeme(config)
165 includeme(config)
162 pyramid_app = config.make_wsgi_app()
166 pyramid_app = config.make_wsgi_app()
163 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
167 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
164 pyramid_app.config = config
168 pyramid_app.config = config
165
169
166 # creating the app uses a db connection - return it to the pool once we are done
170 # creating the app uses a db connection - return it to the pool once we are done
167 meta.Session.remove()
171 meta.Session.remove()
168
172
169 return pyramid_app
173 return pyramid_app
170
174
171
175
172 def make_not_found_view(config):
176 def make_not_found_view(config):
173 """
177 """
174 This creates the view which should be registered as not-found-view to
178 This creates the view which should be registered as not-found-view to
175 pyramid. Basically it consists of the old pylons app, converted to a view.
179 pyramid. Basically it consists of the old pylons app, converted to a view.
176 Additionally it is wrapped by some other middlewares.
180 Additionally it is wrapped by some other middlewares.
177 """
181 """
178 settings = config.registry.settings
182 settings = config.registry.settings
179 vcs_server_enabled = settings['vcs.server.enable']
183 vcs_server_enabled = settings['vcs.server.enable']
180
184
181 # Make pylons app from unprepared settings.
185 # Make pylons app from unprepared settings.
182 pylons_app = make_app(
186 pylons_app = make_app(
183 config.registry._pylons_compat_global_config,
187 config.registry._pylons_compat_global_config,
184 **config.registry._pylons_compat_settings)
188 **config.registry._pylons_compat_settings)
185 config.registry._pylons_compat_config = pylons_app.config
189 config.registry._pylons_compat_config = pylons_app.config
186
190
187 # Appenlight monitoring.
191 # Appenlight monitoring.
188 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
192 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
189 pylons_app, settings)
193 pylons_app, settings)
190
194
191 # The pylons app is executed inside of the pyramid 404 exception handler.
195 # The pylons app is executed inside of the pyramid 404 exception handler.
192 # Exceptions which are raised inside of it are not handled by pyramid
196 # Exceptions which are raised inside of it are not handled by pyramid
193 # again. Therefore we add a middleware that invokes the error handler in
197 # again. Therefore we add a middleware that invokes the error handler in
194 # case of an exception or error response. This way we return proper error
198 # case of an exception or error response. This way we return proper error
195 # HTML pages in case of an error.
199 # HTML pages in case of an error.
196 reraise = (settings.get('debugtoolbar.enabled', False) or
200 reraise = (settings.get('debugtoolbar.enabled', False) or
197 rhodecode.disable_error_handler)
201 rhodecode.disable_error_handler)
198 pylons_app = PylonsErrorHandlingMiddleware(
202 pylons_app = PylonsErrorHandlingMiddleware(
199 pylons_app, error_handler, reraise)
203 pylons_app, error_handler, reraise)
200
204
201 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
205 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
202 # view to handle the request. Therefore it is wrapped around the pylons
206 # view to handle the request. Therefore it is wrapped around the pylons
203 # app. It has to be outside of the error handling otherwise error responses
207 # app. It has to be outside of the error handling otherwise error responses
204 # from the vcsserver are converted to HTML error pages. This confuses the
208 # from the vcsserver are converted to HTML error pages. This confuses the
205 # command line tools and the user won't get a meaningful error message.
209 # command line tools and the user won't get a meaningful error message.
206 if vcs_server_enabled:
210 if vcs_server_enabled:
207 pylons_app = VCSMiddleware(
211 pylons_app = VCSMiddleware(
208 pylons_app, settings, appenlight_client, registry=config.registry)
212 pylons_app, settings, appenlight_client, registry=config.registry)
209
213
210 # Convert WSGI app to pyramid view and return it.
214 # Convert WSGI app to pyramid view and return it.
211 return wsgiapp(pylons_app)
215 return wsgiapp(pylons_app)
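# --- Illustrative aside (not part of the diff above) ----------------------
# Minimal sketch of the "legacy WSGI app as a Pyramid not-found view"
# pattern that make_not_found_view() enables: a trivial WSGI callable stands
# in for the middleware-wrapped Pylons stack, and the add_notfound_view
# registration is assumed to happen elsewhere (e.g. in includeme).
from pyramid.config import Configurator
from pyramid.wsgi import wsgiapp


def legacy_wsgi_app(environ, start_response):
    # stand-in for the wrapped Pylons application
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['handled by the legacy application\n']


config = Configurator()
config.add_notfound_view(wsgiapp(legacy_wsgi_app))
app = config.make_wsgi_app()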
212
216
213
217
214 def add_pylons_compat_data(registry, global_config, settings):
218 def add_pylons_compat_data(registry, global_config, settings):
215 """
219 """
216 Attach data to the registry to support the Pylons integration.
220 Attach data to the registry to support the Pylons integration.
217 """
221 """
218 registry._pylons_compat_global_config = global_config
222 registry._pylons_compat_global_config = global_config
219 registry._pylons_compat_settings = settings
223 registry._pylons_compat_settings = settings
220
224
221
225
226 def error_handler(exception, request):
227 import rhodecode
224 from rhodecode.lib.utils2 import AttributeDict
228 from rhodecode.lib import helpers
229
230 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
231
232 base_response = HTTPInternalServerError()
233 # prefer original exception for the response since it may have headers set
234 if isinstance(exception, HTTPException):
235 base_response = exception
236 elif isinstance(exception, VCSCommunicationError):
237 base_response = VCSServerUnavailable()
238
239 def is_http_error(response):
240 # error which should have traceback
241 return response.status_code > 499
242
243 if is_http_error(base_response):
244 log.exception(
245 'error occurred handling this request for path: %s', request.path)
246
247 c = AttributeDict()
248 c.error_message = base_response.status
249 c.error_explanation = base_response.explanation or str(base_response)
250 c.visual = AttributeDict()
251
252 c.visual.rhodecode_support_url = (
253 request.registry.settings.get('rhodecode_support_url') or
254 request.route_url('rhodecode_support')
255 )
256 c.redirect_time = 0
257 c.rhodecode_name = rhodecode_title
258 if not c.rhodecode_name:
259 c.rhodecode_name = 'Rhodecode'
260
261 c.causes = []
262 if hasattr(base_response, 'causes'):
263 c.causes = base_response.causes
264 c.messages = helpers.flash.pop_messages()
265
266 response = render_to_response(
267 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
268 response=base_response)
269
270 return response
271
272
273 def includeme(config):
274 settings = config.registry.settings
275
276 # plugin information
277 config.registry.rhodecode_plugins = OrderedDict()
278
279 config.add_directive(
280 'register_rhodecode_plugin', register_rhodecode_plugin)
281
282 if asbool(settings.get('appenlight', 'false')):
283 config.include('appenlight_client.ext.pyramid_tween')
284
285 # Includes which are required. The application would fail without them.
286 config.include('pyramid_mako')
287 config.include('pyramid_beaker')
288
289 config.include('rhodecode.authentication')
290 config.include('rhodecode.integrations')
291
292 # apps
293 config.include('rhodecode.apps._base')
294 config.include('rhodecode.apps.ops')
295
296 config.include('rhodecode.apps.admin')
297 config.include('rhodecode.apps.channelstream')
298 config.include('rhodecode.apps.login')
299 config.include('rhodecode.apps.home')
300 config.include('rhodecode.apps.repository')
301 config.include('rhodecode.apps.repo_group')
302 config.include('rhodecode.apps.search')
303 config.include('rhodecode.apps.user_profile')
304 config.include('rhodecode.apps.my_account')
305 config.include('rhodecode.apps.svn_support')
306
307 config.include('rhodecode.tweens')
308 config.include('rhodecode.api')
309
310 config.add_route(
311 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
312
313 config.add_translation_dirs('rhodecode:i18n/')
314 settings['default_locale_name'] = settings.get('lang', 'en')
315
316 # Add subscribers.
317 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
318 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
319 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
320
321 # Set the authorization policy.
322 authz_policy = ACLAuthorizationPolicy()
323 config.set_authorization_policy(authz_policy)
324
325 # Set the default renderer for HTML templates to mako.
326 config.add_mako_renderer('.html')
327
328 config.add_renderer(
329 name='json_ext',
330 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
331
332 # include RhodeCode plugins
333 includes = aslist(settings.get('rhodecode.includes', []))
334 for inc in includes:
335 config.include(inc)
336
337 # This is the glue which allows us to migrate in chunks. By registering the
338 # pylons based application as the "Not Found" view in Pyramid, we will
339 # fallback to the old application each time the new one does not yet know
340 # how to handle a request.
341 config.add_notfound_view(make_not_found_view(config))
342
343 if not settings.get('debugtoolbar.enabled', False):
344 # if no toolbar, then any exception gets caught and rendered
345 config.add_view(error_handler, context=Exception)
346
347 config.add_view(error_handler, context=HTTPError)
348
349
350 def includeme_first(config):
351 # redirect automatic browser favicon.ico requests to correct place
352 def favicon_redirect(context, request):
353 return HTTPFound(
354 request.static_path('rhodecode:public/images/favicon.ico'))
355
356 config.add_view(favicon_redirect, route_name='favicon')
357 config.add_route('favicon', '/favicon.ico')
358
359 def robots_redirect(context, request):
360 return HTTPFound(
361 request.static_path('rhodecode:public/robots.txt'))
362
363 config.add_view(robots_redirect, route_name='robots')
364 config.add_route('robots', '/robots.txt')
365
366 config.add_static_view(
367 '_static/deform', 'deform:static')
368 config.add_static_view(
369 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
370
371
372 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
373 """
374 Apply outer WSGI middlewares around the application.
375
376 Part of this has been moved up from the Pylons layer, so that the
377 data is also available if old Pylons code is hit through an already ported
378 view.
379 """
380 settings = config.registry.settings
381
382 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
383 pyramid_app = HttpsFixup(pyramid_app, settings)
384
385 # Add RoutesMiddleware to support the pylons compatibility tween during
386 # migration to pyramid.
387 pyramid_app = SkippableRoutesMiddleware(
388 pyramid_app, config.registry._pylons_compat_config['routes.map'],
389 skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))
390
391 pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)
392
393 if settings['gzip_responses']:
394 pyramid_app = make_gzip_middleware(
395 pyramid_app, settings, compress_level=1)
396
397 # this should be the outer most middleware in the wsgi stack since
398 # middleware like Routes make database calls
399 def pyramid_app_with_cleanup(environ, start_response):
400 try:
401 return pyramid_app(environ, start_response)
402 finally:
403 # Dispose current database session and rollback uncommitted
404 # transactions.
405 meta.Session.remove()
406
407 # In a single threaded mode server, on non sqlite db we should have
408 # '0 Current Checked out connections' at the end of a request,
409 # if not, then something, somewhere is leaving a connection open
410 pool = meta.Base.metadata.bind.engine.pool
411 log.debug('sa pool status: %s', pool.status())
412
413 return pyramid_app_with_cleanup
414
415
416 def sanitize_settings_and_apply_defaults(settings):
417 """
418 Applies settings defaults and does all type conversion.
419
420 We would move all settings parsing and preparation into this place, so that
421 we have only one place left which deals with this part. The remaining parts
422 of the application would start to rely fully on well prepared settings.
423
424 This piece would later be split up per topic to avoid a big fat monster
425 function.
426 """
427
428 # Pyramid's mako renderer has to search in the templates folder so that the
429 # old templates still work. Ported and new templates are expected to use
430 # real asset specifications for the includes.
431 mako_directories = settings.setdefault('mako.directories', [
432 # Base templates of the original Pylons application
433 'rhodecode:templates',
434 ])
435 log.debug(
436 "Using the following Mako template directories: %s",
437 mako_directories)
438
439 # Default includes, possible to change as a user
440 pyramid_includes = settings.setdefault('pyramid.includes', [
441 'rhodecode.lib.middleware.request_wrapper',
442 ])
443 log.debug(
444 "Using the following pyramid.includes: %s",
445 pyramid_includes)
446
447 # TODO: johbo: Re-think this, usually the call to config.include
448 # should allow to pass in a prefix.
449 settings.setdefault('rhodecode.api.url', '/_admin/api')
450
451 # Sanitize generic settings.
452 _list_setting(settings, 'default_encoding', 'UTF-8')
453 _bool_setting(settings, 'is_test', 'false')
454 _bool_setting(settings, 'gzip_responses', 'false')
455
456 # Call split out functions that sanitize settings for each topic.
457 _sanitize_appenlight_settings(settings)
458 _sanitize_vcs_settings(settings)
459
460 return settings
461
462
463 def _sanitize_appenlight_settings(settings):
464 _bool_setting(settings, 'appenlight', 'false')
465
466
467 def _sanitize_vcs_settings(settings):
468 """
469 Applies settings defaults and does type conversion for all VCS related
470 settings.
471 """
472 _string_setting(settings, 'vcs.svn.compatible_version', '')
473 _string_setting(settings, 'git_rev_filter', '--all')
474 _string_setting(settings, 'vcs.hooks.protocol', 'http')
475 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
476 _string_setting(settings, 'vcs.server', '')
477 _string_setting(settings, 'vcs.server.log_level', 'debug')
478 _string_setting(settings, 'vcs.server.protocol', 'http')
479 _bool_setting(settings, 'startup.import_repos', 'false')
480 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
481 _bool_setting(settings, 'vcs.server.enable', 'true')
482 _bool_setting(settings, 'vcs.start_server', 'false')
483 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
484 _int_setting(settings, 'vcs.connection_timeout', 3600)
485
486 # Support legacy values of vcs.scm_app_implementation. Legacy
487 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
488 # which is now mapped to 'http'.
489 scm_app_impl = settings['vcs.scm_app_implementation']
490 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
491 settings['vcs.scm_app_implementation'] = 'http'
492
493
494 def _int_setting(settings, name, default):
495 settings[name] = int(settings.get(name, default))
496
497
498 def _bool_setting(settings, name, default):
499 input = settings.get(name, default)
500 if isinstance(input, unicode):
501 input = input.encode('utf8')
502 settings[name] = asbool(input)
503
504
505 def _list_setting(settings, name, default):
506 raw_value = settings.get(name, default)
507
508 old_separator = ','
509 if old_separator in raw_value:
510 # If we get a comma separated list, pass it to our own function.
511 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
512 else:
513 # Otherwise we assume it uses pyramids space/newline separation.
514 settings[name] = aslist(raw_value)
515
516
517 def _string_setting(settings, name, default, lower=True):
518 value = settings.get(name, default)
519 if lower:
520 value = value.lower()
521 settings[name] = value
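Taken together, these helpers normalize everything read from the .ini file into proper Python types before the rest of the application relies on it. A minimal, self-contained sketch of that behaviour, with simplified stand-ins for the `asbool`/`aslist`/`rhodecode_aslist` converters that the real module imports (they are not defined here):

def _asbool(value):
    # simplified stand-in for paste's asbool
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('true', 'yes', 'on', '1')

def _aslist(value, sep=None):
    # simplified stand-in for aslist / rhodecode_aslist
    if isinstance(value, (list, tuple)):
        return list(value)
    return [chunk.strip() for chunk in value.split(sep) if chunk.strip()]

def sanitize_example(settings):
    # mirrors the comma-vs-whitespace rule of _list_setting and the
    # coercions done by _bool_setting / _int_setting above
    settings['gzip_responses'] = _asbool(settings.get('gzip_responses', 'false'))
    raw = settings.get('vcs.backends', 'hg, git, svn')
    settings['vcs.backends'] = _aslist(raw, sep=',' if ',' in raw else None)
    settings['vcs.connection_timeout'] = int(
        settings.get('vcs.connection_timeout', 3600))
    return settings

print(sanitize_example({'vcs.backends': 'hg, git', 'gzip_responses': 'true'}))
# -> {'gzip_responses': True, 'vcs.backends': ['hg', 'git'],
#    'vcs.connection_timeout': 3600}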
@@ -1,1017 +1,982 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Routes configuration
23
24 The more specific and detailed routes should be defined first so they
25 may take precedent over the more generic routes. For more information
26 refer to the routes manual at http://routes.groovie.org/docs/
27
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 and _route_name variable which uses some of stored naming here to do redirects.
30 """
31 import os
32 import re
33 from routes import Mapper
34
35 # prefix for non repository related links needs to be prefixed with `/`
36 ADMIN_PREFIX = '/_admin'
37 STATIC_FILE_PREFIX = '/_static'
38
39 # Default requirements for URL parts
40 URL_NAME_REQUIREMENTS = {
41 # group name can have a slash in them, but they must not end with a slash
42 'group_name': r'.*?[^/]',
43 'repo_group_name': r'.*?[^/]',
44 # repo names can have a slash in them, but they must not end with a slash
45 'repo_name': r'.*?[^/]',
46 # file path eats up everything at the end
47 'f_path': r'.*',
48 # reference types
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 }
52
53
54 def add_route_requirements(route_path, requirements):
55 """
56 Adds regex requirements to pyramid routes using a mapping dict
57
58 >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
59 '/{action}/{id:\d+}'
60
61 """
62 for key, regex in requirements.items():
63 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
64 return route_path
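For instance, combined with URL_NAME_REQUIREMENTS above, the helper expands a repository route as in the small sketch below; it assumes both names come from this module, and the '/{repo_name}/settings' path is only an example:

print(add_route_requirements('/{repo_name}/settings', URL_NAME_REQUIREMENTS))
# -> '/{repo_name:.*?[^/]}/settings'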
65
66
67 class JSRoutesMapper(Mapper):
68 """
69 Wrapper for routes.Mapper to make pyroutes compatible url definitions
70 """
71 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
72 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
73 def __init__(self, *args, **kw):
74 super(JSRoutesMapper, self).__init__(*args, **kw)
75 self._jsroutes = []
76
77 def connect(self, *args, **kw):
78 """
79 Wrapper for connect to take an extra argument jsroute=True
80
81 :param jsroute: boolean, if True will add the route to the pyroutes list
82 """
83 if kw.pop('jsroute', False):
84 if not self._named_route_regex.match(args[0]):
85 raise Exception('only named routes can be added to pyroutes')
86 self._jsroutes.append(args[0])
87
88 super(JSRoutesMapper, self).connect(*args, **kw)
89
90 def _extract_route_information(self, route):
91 """
92 Convert a route into tuple(name, path, args), eg:
93 ('show_user', '/profile/%(username)s', ['username'])
94 """
95 routepath = route.routepath
96 def replace(matchobj):
97 if matchobj.group(1):
98 return "%%(%s)s" % matchobj.group(1).split(':')[0]
99 else:
100 return "%%(%s)s" % matchobj.group(2)
101
102 routepath = self._argument_prog.sub(replace, routepath)
103 return (
104 route.name,
105 routepath,
106 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
107 for arg in self._argument_prog.findall(route.routepath)]
108 )
109
110 def jsroutes(self):
111 """
112 Return a list of pyroutes.js compatible routes
113 """
114 for route_name in self._jsroutes:
115 yield self._extract_route_information(self._routenames[route_name])
116
117
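To make the pyroutes output concrete: every jsroute-enabled route is reduced to a (name, path template, arguments) tuple, as the _extract_route_information docstring shows. A small, self-contained sketch of that conversion and of dumping the result as JSON; the 'edit_user' route data below is written out by hand rather than taken from a live Mapper:

import json
import re

_argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')  # same pattern as the class above

def to_js_route(name, routepath):
    # turn {arg} / {arg:regex} placeholders into %(arg)s, like the mapper does
    def replace(matchobj):
        if matchobj.group(1):
            return "%%(%s)s" % matchobj.group(1).split(':')[0]
        return "%%(%s)s" % matchobj.group(2)
    path = _argument_prog.sub(replace, routepath)
    args = [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
            for arg in _argument_prog.findall(routepath)]
    return (name, path, args)

print(json.dumps([to_js_route('edit_user', '/_admin/users/{user_id}/edit')]))
# -> [["edit_user", "/_admin/users/%(user_id)s/edit", ["user_id"]]]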
118 def make_map(config):
119 """Create, configure and return the routes Mapper"""
120 rmap = JSRoutesMapper(
121 directory=config['pylons.paths']['controllers'],
122 always_scan=config['debug'])
123 rmap.minimization = False
124 rmap.explicit = False
125
126 from rhodecode.lib.utils2 import str2bool
127 from rhodecode.model import repo, repo_group
128
129 def check_repo(environ, match_dict):
130 """
131 check for valid repository for proper 404 handling
132
133 :param environ:
134 :param match_dict:
135 """
136 repo_name = match_dict.get('repo_name')
137
138 if match_dict.get('f_path'):
139 # fix for multiple initial slashes that causes errors
140 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
141 repo_model = repo.RepoModel()
142 by_name_match = repo_model.get_by_repo_name(repo_name)
143 # if we match quickly from database, short circuit the operation,
144 # and validate repo based on the type.
145 if by_name_match:
146 return True
147
148 by_id_match = repo_model.get_repo_by_id(repo_name)
149 if by_id_match:
150 repo_name = by_id_match.repo_name
151 match_dict['repo_name'] = repo_name
152 return True
153
154 return False
155
156 def check_group(environ, match_dict):
157 """
158 check for valid repository group path for proper 404 handling
159
160 :param environ:
161 :param match_dict:
162 """
163 repo_group_name = match_dict.get('group_name')
164 repo_group_model = repo_group.RepoGroupModel()
165 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
166 if by_name_match:
167 return True
168
169 return False
170
171 def check_user_group(environ, match_dict):
172 """
173 check for valid user group for proper 404 handling
174
175 :param environ:
176 :param match_dict:
177 """
178 return True
179
180 def check_int(environ, match_dict):
181 return match_dict.get('id').isdigit()
182
183
184 #==========================================================================
185 # CUSTOM ROUTES HERE
186 #==========================================================================
187
188 # ping and pylons error test
189 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
190 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
191
192 # ADMIN REPOSITORY ROUTES
193 with rmap.submapper(path_prefix=ADMIN_PREFIX,
194 controller='admin/repos') as m:
195 m.connect('repos', '/repos',
196 action='create', conditions={'method': ['POST']})
197 m.connect('repos', '/repos',
198 action='index', conditions={'method': ['GET']})
199 m.connect('new_repo', '/create_repository', jsroute=True,
200 action='create_repository', conditions={'method': ['GET']})
201 m.connect('delete_repo', '/repos/{repo_name}',
202 action='delete', conditions={'method': ['DELETE']},
203 requirements=URL_NAME_REQUIREMENTS)
204 m.connect('repo', '/repos/{repo_name}',
205 action='show', conditions={'method': ['GET'],
206 'function': check_repo},
207 requirements=URL_NAME_REQUIREMENTS)
208
209 # ADMIN REPOSITORY GROUPS ROUTES
210 with rmap.submapper(path_prefix=ADMIN_PREFIX,
211 controller='admin/repo_groups') as m:
212 m.connect('repo_groups', '/repo_groups',
213 action='create', conditions={'method': ['POST']})
214 m.connect('repo_groups', '/repo_groups',
215 action='index', conditions={'method': ['GET']})
216 m.connect('new_repo_group', '/repo_groups/new',
217 action='new', conditions={'method': ['GET']})
218 m.connect('update_repo_group', '/repo_groups/{group_name}',
219 action='update', conditions={'method': ['PUT'],
220 'function': check_group},
221 requirements=URL_NAME_REQUIREMENTS)
222
223 # EXTRAS REPO GROUP ROUTES
224 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
225 action='edit',
226 conditions={'method': ['GET'], 'function': check_group},
227 requirements=URL_NAME_REQUIREMENTS)
228 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
229 action='edit',
230 conditions={'method': ['PUT'], 'function': check_group},
231 requirements=URL_NAME_REQUIREMENTS)
232
233 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
234 action='edit_repo_group_advanced',
235 conditions={'method': ['GET'], 'function': check_group},
236 requirements=URL_NAME_REQUIREMENTS)
237 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
238 action='edit_repo_group_advanced',
239 conditions={'method': ['PUT'], 'function': check_group},
240 requirements=URL_NAME_REQUIREMENTS)
241
242 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
243 action='edit_repo_group_perms',
244 conditions={'method': ['GET'], 'function': check_group},
245 requirements=URL_NAME_REQUIREMENTS)
246 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
247 action='update_perms',
248 conditions={'method': ['PUT'], 'function': check_group},
249 requirements=URL_NAME_REQUIREMENTS)
250
251 m.connect('delete_repo_group', '/repo_groups/{group_name}',
252 action='delete', conditions={'method': ['DELETE'],
253 'function': check_group},
254 requirements=URL_NAME_REQUIREMENTS)
255
256 # ADMIN USER ROUTES
257 with rmap.submapper(path_prefix=ADMIN_PREFIX,
258 controller='admin/users') as m:
259 m.connect('users', '/users',
260 action='create', conditions={'method': ['POST']})
261 m.connect('new_user', '/users/new',
262 action='new', conditions={'method': ['GET']})
263 m.connect('update_user', '/users/{user_id}',
264 action='update', conditions={'method': ['PUT']})
265 m.connect('delete_user', '/users/{user_id}',
266 action='delete', conditions={'method': ['DELETE']})
267 m.connect('edit_user', '/users/{user_id}/edit',
268 action='edit', conditions={'method': ['GET']}, jsroute=True)
269 m.connect('user', '/users/{user_id}',
270 action='show', conditions={'method': ['GET']})
271 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
272 action='reset_password', conditions={'method': ['POST']})
273 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
274 action='create_personal_repo_group', conditions={'method': ['POST']})
275
276 # EXTRAS USER ROUTES
277 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
278 action='edit_advanced', conditions={'method': ['GET']})
279 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
280 action='update_advanced', conditions={'method': ['PUT']})
281
282 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
283 action='edit_global_perms', conditions={'method': ['GET']})
284 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
285 action='update_global_perms', conditions={'method': ['PUT']})
286
287 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
288 action='edit_perms_summary', conditions={'method': ['GET']})
289
290 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
291 action='edit_emails', conditions={'method': ['GET']})
292 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
293 action='add_email', conditions={'method': ['PUT']})
294 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
295 action='delete_email', conditions={'method': ['DELETE']})
296
297 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
298 action='edit_ips', conditions={'method': ['GET']})
299 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
300 action='add_ip', conditions={'method': ['PUT']})
301 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
302 action='delete_ip', conditions={'method': ['DELETE']})
303
304 # ADMIN USER GROUPS REST ROUTES
305 with rmap.submapper(path_prefix=ADMIN_PREFIX,
306 controller='admin/user_groups') as m:
307 m.connect('users_groups', '/user_groups',
308 action='create', conditions={'method': ['POST']})
309 m.connect('users_groups', '/user_groups',
310 action='index', conditions={'method': ['GET']})
311 m.connect('new_users_group', '/user_groups/new',
312 action='new', conditions={'method': ['GET']})
313 m.connect('update_users_group', '/user_groups/{user_group_id}',
314 action='update', conditions={'method': ['PUT']})
315 m.connect('delete_users_group', '/user_groups/{user_group_id}',
316 action='delete', conditions={'method': ['DELETE']})
317 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
318 action='edit', conditions={'method': ['GET']},
319 function=check_user_group)
320
321 # EXTRAS USER GROUP ROUTES
322 m.connect('edit_user_group_global_perms',
323 '/user_groups/{user_group_id}/edit/global_permissions',
324 action='edit_global_perms', conditions={'method': ['GET']})
325 m.connect('edit_user_group_global_perms',
326 '/user_groups/{user_group_id}/edit/global_permissions',
327 action='update_global_perms', conditions={'method': ['PUT']})
328 m.connect('edit_user_group_perms_summary',
329 '/user_groups/{user_group_id}/edit/permissions_summary',
330 action='edit_perms_summary', conditions={'method': ['GET']})
331
332 m.connect('edit_user_group_perms',
333 '/user_groups/{user_group_id}/edit/permissions',
334 action='edit_perms', conditions={'method': ['GET']})
335 m.connect('edit_user_group_perms',
336 '/user_groups/{user_group_id}/edit/permissions',
337 action='update_perms', conditions={'method': ['PUT']})
338
339 m.connect('edit_user_group_advanced',
340 '/user_groups/{user_group_id}/edit/advanced',
341 action='edit_advanced', conditions={'method': ['GET']})
342
343 m.connect('edit_user_group_advanced_sync',
344 '/user_groups/{user_group_id}/edit/advanced/sync',
345 action='edit_advanced_set_synchronization', conditions={'method': ['POST']})
346
347 m.connect('edit_user_group_members',
348 '/user_groups/{user_group_id}/edit/members', jsroute=True,
349 action='user_group_members', conditions={'method': ['GET']})
350
351 # ADMIN PERMISSIONS ROUTES
352 with rmap.submapper(path_prefix=ADMIN_PREFIX,
353 controller='admin/permissions') as m:
354 m.connect('admin_permissions_application', '/permissions/application',
355 action='permission_application_update', conditions={'method': ['POST']})
356 m.connect('admin_permissions_application', '/permissions/application',
357 action='permission_application', conditions={'method': ['GET']})
358
359 m.connect('admin_permissions_global', '/permissions/global',
360 action='permission_global_update', conditions={'method': ['POST']})
361 m.connect('admin_permissions_global', '/permissions/global',
362 action='permission_global', conditions={'method': ['GET']})
363
364 m.connect('admin_permissions_object', '/permissions/object',
365 action='permission_objects_update', conditions={'method': ['POST']})
366 m.connect('admin_permissions_object', '/permissions/object',
367 action='permission_objects', conditions={'method': ['GET']})
368
369 m.connect('admin_permissions_ips', '/permissions/ips',
370 action='permission_ips', conditions={'method': ['POST']})
371 m.connect('admin_permissions_ips', '/permissions/ips',
372 action='permission_ips', conditions={'method': ['GET']})
373
374 m.connect('admin_permissions_overview', '/permissions/overview',
375 action='permission_perms', conditions={'method': ['GET']})
376
377 # ADMIN DEFAULTS REST ROUTES
378 with rmap.submapper(path_prefix=ADMIN_PREFIX,
379 controller='admin/defaults') as m:
380 m.connect('admin_defaults_repositories', '/defaults/repositories',
381 action='update_repository_defaults', conditions={'method': ['POST']})
382 m.connect('admin_defaults_repositories', '/defaults/repositories',
383 action='index', conditions={'method': ['GET']})
384
385 # ADMIN DEBUG STYLE ROUTES
386 if str2bool(config.get('debug_style')):
387 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
388 controller='debug_style') as m:
389 m.connect('debug_style_home', '',
390 action='index', conditions={'method': ['GET']})
391 m.connect('debug_style_template', '/t/{t_path}',
392 action='template', conditions={'method': ['GET']})
393
394 # ADMIN SETTINGS ROUTES
395 with rmap.submapper(path_prefix=ADMIN_PREFIX,
396 controller='admin/settings') as m:
397
398 # default
399 m.connect('admin_settings', '/settings',
400 action='settings_global_update',
401 conditions={'method': ['POST']})
402 m.connect('admin_settings', '/settings',
403 action='settings_global', conditions={'method': ['GET']})
404
405 m.connect('admin_settings_vcs', '/settings/vcs',
406 action='settings_vcs_update',
407 conditions={'method': ['POST']})
408 m.connect('admin_settings_vcs', '/settings/vcs',
409 action='settings_vcs',
410 conditions={'method': ['GET']})
411 m.connect('admin_settings_vcs', '/settings/vcs',
412 action='delete_svn_pattern',
413 conditions={'method': ['DELETE']})
414
415 m.connect('admin_settings_mapping', '/settings/mapping',
416 action='settings_mapping_update',
417 conditions={'method': ['POST']})
418 m.connect('admin_settings_mapping', '/settings/mapping',
419 action='settings_mapping', conditions={'method': ['GET']})
420
421 m.connect('admin_settings_global', '/settings/global',
422 action='settings_global_update',
423 conditions={'method': ['POST']})
424 m.connect('admin_settings_global', '/settings/global',
425 action='settings_global', conditions={'method': ['GET']})
426
427 m.connect('admin_settings_visual', '/settings/visual',
428 action='settings_visual_update',
429 conditions={'method': ['POST']})
430 m.connect('admin_settings_visual', '/settings/visual',
431 action='settings_visual', conditions={'method': ['GET']})
432
433 m.connect('admin_settings_issuetracker',
434 '/settings/issue-tracker', action='settings_issuetracker',
435 conditions={'method': ['GET']})
436 m.connect('admin_settings_issuetracker_save',
437 '/settings/issue-tracker/save',
438 action='settings_issuetracker_save',
439 conditions={'method': ['POST']})
440 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
441 action='settings_issuetracker_test',
442 conditions={'method': ['POST']})
443 m.connect('admin_issuetracker_delete',
444 '/settings/issue-tracker/delete',
445 action='settings_issuetracker_delete',
446 conditions={'method': ['DELETE']})
447
448 m.connect('admin_settings_email', '/settings/email',
449 action='settings_email_update',
450 conditions={'method': ['POST']})
451 m.connect('admin_settings_email', '/settings/email',
452 action='settings_email', conditions={'method': ['GET']})
453
454 m.connect('admin_settings_hooks', '/settings/hooks',
455 action='settings_hooks_update',
456 conditions={'method': ['POST', 'DELETE']})
457 m.connect('admin_settings_hooks', '/settings/hooks',
458 action='settings_hooks', conditions={'method': ['GET']})
459
460 m.connect('admin_settings_search', '/settings/search',
461 action='settings_search', conditions={'method': ['GET']})
462
463 m.connect('admin_settings_supervisor', '/settings/supervisor',
464 action='settings_supervisor', conditions={'method': ['GET']})
465 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
466 action='settings_supervisor_log', conditions={'method': ['GET']})
467
468 m.connect('admin_settings_labs', '/settings/labs',
469 action='settings_labs_update',
470 conditions={'method': ['POST']})
471 m.connect('admin_settings_labs', '/settings/labs',
472 action='settings_labs', conditions={'method': ['GET']})
473
473 # ADMIN MY ACCOUNT
474 # ADMIN MY ACCOUNT
474 with rmap.submapper(path_prefix=ADMIN_PREFIX,
475 with rmap.submapper(path_prefix=ADMIN_PREFIX,
475 controller='admin/my_account') as m:
476 controller='admin/my_account') as m:
476
477
477 m.connect('my_account_edit', '/my_account/edit',
478 m.connect('my_account_edit', '/my_account/edit',
478 action='my_account_edit', conditions={'method': ['GET']})
479 action='my_account_edit', conditions={'method': ['GET']})
479 m.connect('my_account', '/my_account/update',
480 m.connect('my_account', '/my_account/update',
480 action='my_account_update', conditions={'method': ['POST']})
481 action='my_account_update', conditions={'method': ['POST']})
481
482
        # NOTE(marcink): this needs to be kept for the password force flag to be
        # handled; remove after migration to pyramid
        m.connect('my_account_password', '/my_account/password',
            action='my_account_password', conditions={'method': ['GET']})

        m.connect('my_account_repos', '/my_account/repos',
            action='my_account_repos', conditions={'method': ['GET']})

        m.connect('my_account_watched', '/my_account/watched',
            action='my_account_watched', conditions={'method': ['GET']})

        m.connect('my_account_pullrequests', '/my_account/pull_requests',
            action='my_account_pullrequests', conditions={'method': ['GET']})

        m.connect('my_account_perms', '/my_account/perms',
            action='my_account_perms', conditions={'method': ['GET']})

        m.connect('my_account_emails', '/my_account/emails',
            action='my_account_emails', conditions={'method': ['GET']})
        m.connect('my_account_emails', '/my_account/emails',
            action='my_account_emails_add', conditions={'method': ['POST']})
        m.connect('my_account_emails', '/my_account/emails',
            action='my_account_emails_delete', conditions={'method': ['DELETE']})

        m.connect('my_account_notifications', '/my_account/notifications',
            action='my_notifications',
            conditions={'method': ['GET']})
        m.connect('my_account_notifications_toggle_visibility',
            '/my_account/toggle_visibility',
            action='my_notifications_toggle_visibility',
            conditions={'method': ['POST']})

    # NOTIFICATION REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/notifications') as m:
        m.connect('notifications', '/notifications',
            action='index', conditions={'method': ['GET']})
        m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
            action='mark_all_read', conditions={'method': ['POST']})
        m.connect('/notifications/{notification_id}',
            action='update', conditions={'method': ['PUT']})
        m.connect('/notifications/{notification_id}',
            action='delete', conditions={'method': ['DELETE']})
        m.connect('notification', '/notifications/{notification_id}',
            action='show', conditions={'method': ['GET']})

    # ADMIN GIST
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/gists') as m:
        m.connect('gists', '/gists',
            action='create', conditions={'method': ['POST']})
        m.connect('gists', '/gists', jsroute=True,
            action='index', conditions={'method': ['GET']})
        m.connect('new_gist', '/gists/new', jsroute=True,
            action='new', conditions={'method': ['GET']})

        m.connect('/gists/{gist_id}',
            action='delete', conditions={'method': ['DELETE']})
        m.connect('edit_gist', '/gists/{gist_id}/edit',
            action='edit_form', conditions={'method': ['GET']})
        m.connect('edit_gist', '/gists/{gist_id}/edit',
            action='edit', conditions={'method': ['POST']})
        m.connect(
            'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
            action='check_revision', conditions={'method': ['GET']})

        m.connect('gist', '/gists/{gist_id}',
            action='show', conditions={'method': ['GET']})
        m.connect('gist_rev', '/gists/{gist_id}/{revision}',
            revision='tip',
            action='show', conditions={'method': ['GET']})
        m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
            revision='tip',
            action='show', conditions={'method': ['GET']})
        m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
            revision='tip',
            action='show', conditions={'method': ['GET']},
            requirements=URL_NAME_REQUIREMENTS)

    # USER JOURNAL
    rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
        controller='journal', action='index')
    rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
        controller='journal', action='journal_rss')
    rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
        controller='journal', action='journal_atom')

    rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
        controller='journal', action='public_journal')

    rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
        controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
        controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_atom',
        '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
        action='public_journal_atom')

    rmap.connect('public_journal_atom_old',
        '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
        action='public_journal_atom')

    rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
        controller='journal', action='toggle_following', jsroute=True,
        conditions={'method': ['POST']})

    # FEEDS
    rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
        controller='feed', action='rss',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
        controller='feed', action='atom',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================

    rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
        controller='admin/repos', action='repo_creating',
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
        controller='admin/repos', action='repo_check',
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
        controller='summary', action='repo_stats',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
        controller='summary', action='repo_refs_data',
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)
    rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
        controller='summary', action='repo_refs_changelog_data',
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
        controller='changeset', revision='tip',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)
    rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
        controller='changeset', revision='tip', action='changeset_children',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
        controller='changeset', revision='tip', action='changeset_parents',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    # repo edit options
    rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
        controller='admin/repos', action='edit_fields',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
        controller='admin/repos', action='create_repo_field',
        conditions={'method': ['PUT'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
        controller='admin/repos', action='delete_repo_field',
        conditions={'method': ['DELETE'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
        controller='admin/repos', action='toggle_locking',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
        controller='admin/repos', action='edit_remote_form',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
        controller='admin/repos', action='edit_remote',
        conditions={'method': ['PUT'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
        controller='admin/repos', action='edit_statistics_form',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
        controller='admin/repos', action='edit_statistics',
        conditions={'method': ['PUT'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_settings_issuetracker',
        '/{repo_name}/settings/issue-tracker',
        controller='admin/repos', action='repo_issuetracker',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_test',
        '/{repo_name}/settings/issue-tracker/test',
        controller='admin/repos', action='repo_issuetracker_test',
        conditions={'method': ['POST'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_delete',
        '/{repo_name}/settings/issue-tracker/delete',
        controller='admin/repos', action='repo_issuetracker_delete',
        conditions={'method': ['DELETE'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_save',
        '/{repo_name}/settings/issue-tracker/save',
        controller='admin/repos', action='repo_issuetracker_save',
        conditions={'method': ['POST'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
        controller='admin/repos', action='repo_settings_vcs_update',
        conditions={'method': ['POST'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
        controller='admin/repos', action='repo_settings_vcs',
        conditions={'method': ['GET'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
        controller='admin/repos', action='repo_delete_svn_pattern',
        conditions={'method': ['DELETE'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest',
        controller='admin/repos', action='repo_settings_pullrequest',
        conditions={'method': ['GET', 'POST'], 'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    # still working url for backward compat.
    rmap.connect('raw_changeset_home_depraced',
        '/{repo_name}/raw-changeset/{revision}',
        controller='changeset', action='changeset_raw',
        revision='tip', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    # new URLs
    rmap.connect('changeset_raw_home',
        '/{repo_name}/changeset-diff/{revision}',
        controller='changeset', action='changeset_raw',
        revision='tip', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_patch_home',
        '/{repo_name}/changeset-patch/{revision}',
        controller='changeset', action='changeset_patch',
        revision='tip', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_download_home',
        '/{repo_name}/changeset-download/{revision}',
        controller='changeset', action='changeset_download',
        revision='tip', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment',
        '/{repo_name}/changeset/{revision}/comment', jsroute=True,
        controller='changeset', revision='tip', action='comment',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_preview',
        '/{repo_name}/changeset/comment/preview', jsroute=True,
        controller='changeset', action='preview_comment',
        conditions={'function': check_repo, 'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_delete',
        '/{repo_name}/changeset/comment/{comment_id}/delete',
        controller='changeset', action='delete_comment',
        conditions={'function': check_repo, 'method': ['DELETE']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}',
        controller='changeset', action='changeset_info',
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('compare_home',
        '/{repo_name}/compare',
        controller='compare', action='index',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('compare_url',
        '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
        controller='compare', action='compare',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_home',
        '/{repo_name}/pull-request/new', controller='pullrequests',
        action='index', conditions={'function': check_repo,
                                    'method': ['GET']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest',
        '/{repo_name}/pull-request/new', controller='pullrequests',
        action='create', conditions={'function': check_repo,
                                     'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_refs',
        '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        controller='pullrequests',
        action='get_repo_refs',
        conditions={'function': check_repo, 'method': ['GET']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_destinations',
        '/{repo_name}/pull-request/repo-destinations',
        controller='pullrequests',
        action='get_repo_destinations',
        conditions={'function': check_repo, 'method': ['GET']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_show',
        '/{repo_name}/pull-request/{pull_request_id}',
        controller='pullrequests',
        action='show', conditions={'function': check_repo,
                                   'method': ['GET']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_update',
        '/{repo_name}/pull-request/{pull_request_id}',
        controller='pullrequests',
        action='update', conditions={'function': check_repo,
                                     'method': ['PUT']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_merge',
        '/{repo_name}/pull-request/{pull_request_id}',
        controller='pullrequests',
        action='merge', conditions={'function': check_repo,
                                    'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_delete',
        '/{repo_name}/pull-request/{pull_request_id}',
        controller='pullrequests',
        action='delete', conditions={'function': check_repo,
                                     'method': ['DELETE']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_comment',
        '/{repo_name}/pull-request-comment/{pull_request_id}',
        controller='pullrequests',
        action='comment', conditions={'function': check_repo,
                                      'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment_delete',
        '/{repo_name}/pull-request-comment/{comment_id}/delete',
        controller='pullrequests', action='delete_comment',
        conditions={'function': check_repo, 'method': ['DELETE']},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('summary_home_explicit', '/{repo_name}/summary',
        controller='summary', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
        controller='changelog', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
        controller='changelog', action='changelog_summary',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_file_home',
        '/{repo_name}/changelog/{revision}/{f_path}',
        controller='changelog', f_path=None,
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changelog_elements', '/{repo_name}/changelog_details',
        controller='changelog', action='changelog_elements',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
        controller='files', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_home_simple_catchrev',
        '/{repo_name}/files/{revision}',
        controller='files', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home_simple_catchall',
        '/{repo_name}/files',
        controller='files', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_history_home',
        '/{repo_name}/history/{revision}/{f_path}',
        controller='files', action='history', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_authors_home',
        '/{repo_name}/authors/{revision}/{f_path}',
        controller='files', action='authors', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
        controller='files', action='diff', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_diff_2way_home',
        '/{repo_name}/diff-2way/{f_path}',
        controller='files', action='diff_2way', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_rawfile_home',
        '/{repo_name}/rawfile/{revision}/{f_path}',
        controller='files', action='rawfile', revision='tip',
        f_path='', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_raw_home',
        '/{repo_name}/raw/{revision}/{f_path}',
        controller='files', action='raw', revision='tip', f_path='',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_render_home',
        '/{repo_name}/render/{revision}/{f_path}',
        controller='files', action='index', revision='tip', f_path='',
        rendered=True, conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_annotate_home',
        '/{repo_name}/annotate/{revision}/{f_path}',
        controller='files', action='index', revision='tip',
        f_path='', annotate=True, conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_annotate_previous',
        '/{repo_name}/annotate-previous/{revision}/{f_path}',
        controller='files', action='annotate_previous', revision='tip',
        f_path='', annotate=True, conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_edit',
        '/{repo_name}/edit/{revision}/{f_path}',
        controller='files', action='edit', revision='tip',
        f_path='',
        conditions={'function': check_repo, 'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit_home',
        '/{repo_name}/edit/{revision}/{f_path}',
        controller='files', action='edit_home', revision='tip',
        f_path='', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add',
        '/{repo_name}/add/{revision}/{f_path}',
        controller='files', action='add', revision='tip',
        f_path='',
        conditions={'function': check_repo, 'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add_home',
        '/{repo_name}/add/{revision}/{f_path}',
        controller='files', action='add_home', revision='tip',
        f_path='', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete',
        '/{repo_name}/delete/{revision}/{f_path}',
        controller='files', action='delete', revision='tip',
        f_path='',
        conditions={'function': check_repo, 'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete_home',
        '/{repo_name}/delete/{revision}/{f_path}',
        controller='files', action='delete_home', revision='tip',
        f_path='', conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
        controller='files', action='archivefile',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodelist_home',
        '/{repo_name}/nodelist/{revision}/{f_path}',
        controller='files', action='nodelist',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodetree_full',
        '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
        controller='files', action='nodetree_full',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
        controller='forks', action='fork_create',
        conditions={'function': check_repo, 'method': ['POST']},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_fork_home', '/{repo_name}/fork',
        controller='forks', action='fork',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_forks_home', '/{repo_name}/forks',
        controller='forks', action='forks',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    # catch all, at the end
    _connect_with_slash(
        rmap, 'summary_home', '/{repo_name}', jsroute=True,
        controller='summary', action='index',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    return rmap


def _connect_with_slash(mapper, name, path, *args, **kwargs):
    """
    Connect a route with an optional trailing slash in `path`.
    """
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)
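
For context on the helper above: `_connect_with_slash` registers every route twice, once with and once without a trailing slash, so both `/{repo_name}` and `/{repo_name}/` resolve to the same controller action. Below is a minimal standalone sketch of that behaviour; the RecordingMapper stub is hypothetical and only stands in for the real Routes mapper used in routing.py.

class RecordingMapper(object):
    """Hypothetical stand-in for the Routes mapper; it only records calls."""
    def __init__(self):
        self.routes = []

    def connect(self, name, path, **kwargs):
        # The real mapper would wire the path to a controller/action here;
        # for illustration we simply remember what would be registered.
        self.routes.append((name, path, kwargs))


def _connect_with_slash(mapper, name, path, *args, **kwargs):
    """Connect a route with an optional trailing slash in `path`."""
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)


rmap = RecordingMapper()
_connect_with_slash(rmap, 'summary_home', '/{repo_name}',
                    controller='summary', action='index')
# Two entries are recorded:
#   ('summary_home_slash', '/{repo_name}/', {'controller': 'summary', 'action': 'index'})
#   ('summary_home', '/{repo_name}', {'controller': 'summary', 'action': 'index'})
print(rmap.routes)
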
@@ -1,610 +1,610 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2013-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
Repositories controller for RhodeCode
"""

import logging
import traceback

import formencode
from formencode import htmlfill
from pylons import request, tmpl_context as c, url
from pylons.controllers.util import redirect
from pylons.i18n.translation import _
from webob.exc import HTTPForbidden, HTTPNotFound, HTTPBadRequest

import rhodecode
from rhodecode.lib import auth, helpers as h
from rhodecode.lib.auth import (
    LoginRequired, HasPermissionAllDecorator,
    HasRepoPermissionAllDecorator, NotAnonymous, HasPermissionAny,
    HasRepoGroupPermissionAny, HasRepoPermissionAnyDecorator)
from rhodecode.lib.base import BaseRepoController, render
from rhodecode.lib.ext_json import json
from rhodecode.lib.exceptions import AttachedForksError
from rhodecode.lib.utils import action_logger, repo_name_slug, jsonify
from rhodecode.lib.utils2 import safe_int, str2bool
from rhodecode.lib.vcs import RepositoryError
from rhodecode.model.db import (
    User, Repository, UserFollowing, RepoGroup, RepositoryField)
from rhodecode.model.forms import (
    RepoForm, RepoFieldForm, RepoPermsForm, RepoVcsSettingsForm,
    IssueTrackerPatternsForm)
from rhodecode.model.meta import Session
from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList
from rhodecode.model.settings import (
    SettingsModel, IssueTrackerSettingsModel, VcsSettingsModel,
    SettingNotFound)

log = logging.getLogger(__name__)


class ReposController(BaseRepoController):
    """
    REST Controller styled on the Atom Publishing Protocol"""
    # To properly map this controller, ensure your config/routing.py
    # file has a resource setup:
    # map.resource('repo', 'repos')

    @LoginRequired()
    def __before__(self):
        super(ReposController, self).__before__()

    def _load_repo(self, repo_name):
        repo_obj = Repository.get_by_repo_name(repo_name)

        if repo_obj is None:
            h.not_mapped_error(repo_name)
            return redirect(url('repos'))

        return repo_obj

    def __load_defaults(self, repo=None):
        acl_groups = RepoGroupList(RepoGroup.query().all(),
                                   perm_set=['group.write', 'group.admin'])
        c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
        c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups)

        # in case someone no longer has group.write access to a repository,
        # pre-fill the list with this entry; we don't care if it is the same,
        # but it will allow saving repo data properly.

        repo_group = None
        if repo:
            repo_group = repo.group
        if repo_group and unicode(repo_group.group_id) not in c.repo_groups_choices:
            c.repo_groups_choices.append(unicode(repo_group.group_id))
            c.repo_groups.append(RepoGroup._generate_choice(repo_group))

        choices, c.landing_revs = ScmModel().get_repo_landing_revs()
        c.landing_revs_choices = choices

    def __load_data(self, repo_name=None):
        """
        Load default settings for edit and update
106
106
107 :param repo_name:
107 :param repo_name:
108 """
108 """
109 c.repo_info = self._load_repo(repo_name)
109 c.repo_info = self._load_repo(repo_name)
110 self.__load_defaults(c.repo_info)
110 self.__load_defaults(c.repo_info)
111
111
112 # override defaults for exact repo info here git/hg etc
112 # override defaults for exact repo info here git/hg etc
113 if not c.repository_requirements_missing:
113 if not c.repository_requirements_missing:
114 choices, c.landing_revs = ScmModel().get_repo_landing_revs(
114 choices, c.landing_revs = ScmModel().get_repo_landing_revs(
115 c.repo_info)
115 c.repo_info)
116 c.landing_revs_choices = choices
116 c.landing_revs_choices = choices
117 defaults = RepoModel()._get_defaults(repo_name)
117 defaults = RepoModel()._get_defaults(repo_name)
118
118
119 return defaults
119 return defaults
120
120
121 def _log_creation_exception(self, e, repo_name):
121 def _log_creation_exception(self, e, repo_name):
122 reason = None
122 reason = None
123 if len(e.args) == 2:
123 if len(e.args) == 2:
124 reason = e.args[1]
124 reason = e.args[1]
125
125
126 if reason == 'INVALID_CERTIFICATE':
126 if reason == 'INVALID_CERTIFICATE':
127 log.exception(
127 log.exception(
128 'Exception creating a repository: invalid certificate')
128 'Exception creating a repository: invalid certificate')
129 msg = (_('Error creating repository %s: invalid certificate')
129 msg = (_('Error creating repository %s: invalid certificate')
130 % repo_name)
130 % repo_name)
131 else:
131 else:
132 log.exception("Exception creating a repository")
132 log.exception("Exception creating a repository")
133 msg = (_('Error creating repository %s')
133 msg = (_('Error creating repository %s')
134 % repo_name)
134 % repo_name)
135
135
136 return msg
136 return msg
137
137
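# Illustrative sketch of the error-reason convention handled by
# _log_creation_exception above: backends may raise exceptions whose second
# positional argument is a machine-readable reason code, and only
# 'INVALID_CERTIFICATE' gets a dedicated message. The plain strings and the
# helper name below are stand-ins, not RhodeCode internals.
def creation_error_message(exc, repo_name):
    reason = exc.args[1] if len(exc.args) == 2 else None
    if reason == 'INVALID_CERTIFICATE':
        return 'Error creating repository %s: invalid certificate' % repo_name
    return 'Error creating repository %s' % repo_name

# e.g. creation_error_message(Exception('boom', 'INVALID_CERTIFICATE'), 'foo')
# -> 'Error creating repository foo: invalid certificate'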
138 @NotAnonymous()
138 @NotAnonymous()
139 def index(self, format='html'):
139 def index(self, format='html'):
140 """GET /repos: All items in the collection"""
140 """GET /repos: All items in the collection"""
141 # url('repos')
141 # url('repos')
142
142
143 repo_list = Repository.get_all_repos()
143 repo_list = Repository.get_all_repos()
144 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
144 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
145 repos_data = RepoModel().get_repos_as_dict(
145 repos_data = RepoModel().get_repos_as_dict(
146 repo_list=c.repo_list, admin=True, super_user_actions=True)
146 repo_list=c.repo_list, admin=True, super_user_actions=True)
147 # json used to render the grid
147 # json used to render the grid
148 c.data = json.dumps(repos_data)
148 c.data = json.dumps(repos_data)
149
149
150 return render('admin/repos/repos.mako')
150 return render('admin/repos/repos.mako')
151
151
152 # perms check inside
152 # perms check inside
153 @NotAnonymous()
153 @NotAnonymous()
154 @auth.CSRFRequired()
154 @auth.CSRFRequired()
155 def create(self):
155 def create(self):
156 """
156 """
157 POST /repos: Create a new item"""
157 POST /repos: Create a new item"""
158 # url('repos')
158 # url('repos')
159
159
160 self.__load_defaults()
160 self.__load_defaults()
161 form_result = {}
161 form_result = {}
162 task_id = None
162 task_id = None
163 c.personal_repo_group = c.rhodecode_user.personal_repo_group
163 c.personal_repo_group = c.rhodecode_user.personal_repo_group
164 try:
164 try:
165 # the CanWriteToGroup validator checks permissions of this POST
165 # the CanWriteToGroup validator checks permissions of this POST
166 form_result = RepoForm(repo_groups=c.repo_groups_choices,
166 form_result = RepoForm(repo_groups=c.repo_groups_choices,
167 landing_revs=c.landing_revs_choices)()\
167 landing_revs=c.landing_revs_choices)()\
168 .to_python(dict(request.POST))
168 .to_python(dict(request.POST))
169
169
170 # creation is sometimes done asynchronously on celery; db transaction
170 # creation is sometimes done asynchronously on celery; db transaction
171 # management is handled there.
171 # management is handled there.
172 task = RepoModel().create(form_result, c.rhodecode_user.user_id)
172 task = RepoModel().create(form_result, c.rhodecode_user.user_id)
173 from celery.result import BaseAsyncResult
173 from celery.result import BaseAsyncResult
174 if isinstance(task, BaseAsyncResult):
174 if isinstance(task, BaseAsyncResult):
175 task_id = task.task_id
175 task_id = task.task_id
176 except formencode.Invalid as errors:
176 except formencode.Invalid as errors:
177 return htmlfill.render(
177 return htmlfill.render(
178 render('admin/repos/repo_add.mako'),
178 render('admin/repos/repo_add.mako'),
179 defaults=errors.value,
179 defaults=errors.value,
180 errors=errors.error_dict or {},
180 errors=errors.error_dict or {},
181 prefix_error=False,
181 prefix_error=False,
182 encoding="UTF-8",
182 encoding="UTF-8",
183 force_defaults=False)
183 force_defaults=False)
184
184
185 except Exception as e:
185 except Exception as e:
186 msg = self._log_creation_exception(e, form_result.get('repo_name'))
186 msg = self._log_creation_exception(e, form_result.get('repo_name'))
187 h.flash(msg, category='error')
187 h.flash(msg, category='error')
188 return redirect(h.route_path('home'))
188 return redirect(h.route_path('home'))
189
189
190 return redirect(h.url('repo_creating_home',
190 return redirect(h.url('repo_creating_home',
191 repo_name=form_result['repo_name_full'],
191 repo_name=form_result['repo_name_full'],
192 task_id=task_id))
192 task_id=task_id))
193
193
194 # perms check inside
194 # perms check inside
195 @NotAnonymous()
195 @NotAnonymous()
196 def create_repository(self):
196 def create_repository(self):
197 """GET /_admin/create_repository: Form to create a new item"""
197 """GET /_admin/create_repository: Form to create a new item"""
198 new_repo = request.GET.get('repo', '')
198 new_repo = request.GET.get('repo', '')
199 parent_group = safe_int(request.GET.get('parent_group'))
199 parent_group = safe_int(request.GET.get('parent_group'))
200 _gr = RepoGroup.get(parent_group)
200 _gr = RepoGroup.get(parent_group)
201
201
202 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
202 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
203 # you're neither a super admin nor do you have global create permissions,
203 # you're neither a super admin nor do you have global create permissions,
204 # but maybe you have at least write permission to a parent group?
204 # but maybe you have at least write permission to a parent group?
205
205
206 gr_name = _gr.group_name if _gr else None
206 gr_name = _gr.group_name if _gr else None
207 # check if creating repositories with write permission on the group is enabled
207 # check if creating repositories with write permission on the group is enabled
208 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
208 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
209 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
209 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
210 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
210 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
211 if not (group_admin or (group_write and create_on_write)):
211 if not (group_admin or (group_write and create_on_write)):
212 raise HTTPForbidden
212 raise HTTPForbidden
213
213
214 acl_groups = RepoGroupList(RepoGroup.query().all(),
214 acl_groups = RepoGroupList(RepoGroup.query().all(),
215 perm_set=['group.write', 'group.admin'])
215 perm_set=['group.write', 'group.admin'])
216 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
216 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
217 c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups)
217 c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups)
218 choices, c.landing_revs = ScmModel().get_repo_landing_revs()
218 choices, c.landing_revs = ScmModel().get_repo_landing_revs()
219 c.personal_repo_group = c.rhodecode_user.personal_repo_group
219 c.personal_repo_group = c.rhodecode_user.personal_repo_group
220 c.new_repo = repo_name_slug(new_repo)
220 c.new_repo = repo_name_slug(new_repo)
221
221
222 # apply the defaults from the defaults page
222 # apply the defaults from the defaults page
223 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
223 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
224 # set the checkbox to auto-checked
224 # set the checkbox to auto-checked
225 defaults['repo_copy_permissions'] = True
225 defaults['repo_copy_permissions'] = True
226
226
227 parent_group_choice = '-1'
227 parent_group_choice = '-1'
228 if not c.rhodecode_user.is_admin and c.rhodecode_user.personal_repo_group:
228 if not c.rhodecode_user.is_admin and c.rhodecode_user.personal_repo_group:
229 parent_group_choice = c.rhodecode_user.personal_repo_group
229 parent_group_choice = c.rhodecode_user.personal_repo_group
230
230
231 if parent_group and _gr:
231 if parent_group and _gr:
232 if parent_group in [x[0] for x in c.repo_groups]:
232 if parent_group in [x[0] for x in c.repo_groups]:
233 parent_group_choice = unicode(parent_group)
233 parent_group_choice = unicode(parent_group)
234
234
235 defaults.update({'repo_group': parent_group_choice})
235 defaults.update({'repo_group': parent_group_choice})
236
236
237 return htmlfill.render(
237 return htmlfill.render(
238 render('admin/repos/repo_add.mako'),
238 render('admin/repos/repo_add.mako'),
239 defaults=defaults,
239 defaults=defaults,
240 errors={},
240 errors={},
241 prefix_error=False,
241 prefix_error=False,
242 encoding="UTF-8",
242 encoding="UTF-8",
243 force_defaults=False
243 force_defaults=False
244 )
244 )
245
245
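# Standalone sketch of the permission rule applied in create_repository above
# for non-admin users: creating inside a group is allowed for group admins, or
# for users with group.write when the 'create on write' setting is enabled.
# Plain booleans stand in for the real permission checks.
def may_create_in_group(group_admin, group_write, create_on_write):
    return group_admin or (group_write and create_on_write)

# e.g. may_create_in_group(False, True, True) -> True
# e.g. may_create_in_group(False, True, False) -> False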
246 @NotAnonymous()
246 @NotAnonymous()
247 def repo_creating(self, repo_name):
247 def repo_creating(self, repo_name):
248 c.repo = repo_name
248 c.repo = repo_name
249 c.task_id = request.GET.get('task_id')
249 c.task_id = request.GET.get('task_id')
250 if not c.repo:
250 if not c.repo:
251 raise HTTPNotFound()
251 raise HTTPNotFound()
252 return render('admin/repos/repo_creating.mako')
252 return render('admin/repos/repo_creating.mako')
253
253
254 @NotAnonymous()
254 @NotAnonymous()
255 @jsonify
255 @jsonify
256 def repo_check(self, repo_name):
256 def repo_check(self, repo_name):
257 c.repo = repo_name
257 c.repo = repo_name
258 task_id = request.GET.get('task_id')
258 task_id = request.GET.get('task_id')
259
259
260 if task_id and task_id not in ['None']:
260 if task_id and task_id not in ['None']:
261 import rhodecode
261 import rhodecode
262 from celery.result import AsyncResult
262 from celery.result import AsyncResult
263 if rhodecode.CELERY_ENABLED:
263 if rhodecode.CELERY_ENABLED:
264 task = AsyncResult(task_id)
264 task = AsyncResult(task_id)
265 if task.failed():
265 if task.failed():
266 msg = self._log_creation_exception(task.result, c.repo)
266 msg = self._log_creation_exception(task.result, c.repo)
267 h.flash(msg, category='error')
267 h.flash(msg, category='error')
268 return redirect(h.route_path('home'), code=501)
268 return redirect(h.route_path('home'), code=501)
269
269
270 repo = Repository.get_by_repo_name(repo_name)
270 repo = Repository.get_by_repo_name(repo_name)
271 if repo and repo.repo_state == Repository.STATE_CREATED:
271 if repo and repo.repo_state == Repository.STATE_CREATED:
272 if repo.clone_uri:
272 if repo.clone_uri:
273 clone_uri = repo.clone_uri_hidden
273 clone_uri = repo.clone_uri_hidden
274 h.flash(_('Created repository %s from %s')
274 h.flash(_('Created repository %s from %s')
275 % (repo.repo_name, clone_uri), category='success')
275 % (repo.repo_name, clone_uri), category='success')
276 else:
276 else:
277 repo_url = h.link_to(repo.repo_name,
277 repo_url = h.link_to(
278 h.url('summary_home',
278 repo.repo_name,
279 repo_name=repo.repo_name))
279 h.route_path('repo_summary', repo_name=repo.repo_name))
280 fork = repo.fork
280 fork = repo.fork
281 if fork:
281 if fork:
282 fork_name = fork.repo_name
282 fork_name = fork.repo_name
283 h.flash(h.literal(_('Forked repository %s as %s')
283 h.flash(h.literal(_('Forked repository %s as %s')
284 % (fork_name, repo_url)), category='success')
284 % (fork_name, repo_url)), category='success')
285 else:
285 else:
286 h.flash(h.literal(_('Created repository %s') % repo_url),
286 h.flash(h.literal(_('Created repository %s') % repo_url),
287 category='success')
287 category='success')
288 return {'result': True}
288 return {'result': True}
289 return {'result': False}
289 return {'result': False}
290
290
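# Sketch of the task-id guard used in repo_check above: the id arrives as a
# query-string value, so a missing id can show up as the literal string 'None'
# and must be treated as absent. Purely illustrative helper.
def has_real_task_id(task_id):
    return bool(task_id) and task_id != 'None'

# e.g. has_real_task_id('None') -> False, has_real_task_id('42-abc') -> True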
291 @HasPermissionAllDecorator('hg.admin')
291 @HasPermissionAllDecorator('hg.admin')
292 def show(self, repo_name, format='html'):
292 def show(self, repo_name, format='html'):
293 """GET /repos/repo_name: Show a specific item"""
293 """GET /repos/repo_name: Show a specific item"""
294 # url('repo', repo_name=ID)
294 # url('repo', repo_name=ID)
295
295
296 @HasRepoPermissionAllDecorator('repository.admin')
296 @HasRepoPermissionAllDecorator('repository.admin')
297 def edit_fields(self, repo_name):
297 def edit_fields(self, repo_name):
298 """GET /repo_name/settings: Form to edit an existing item"""
298 """GET /repo_name/settings: Form to edit an existing item"""
299 c.repo_info = self._load_repo(repo_name)
299 c.repo_info = self._load_repo(repo_name)
300 c.repo_fields = RepositoryField.query()\
300 c.repo_fields = RepositoryField.query()\
301 .filter(RepositoryField.repository == c.repo_info).all()
301 .filter(RepositoryField.repository == c.repo_info).all()
302 c.active = 'fields'
302 c.active = 'fields'
303 if request.POST:
303 if request.POST:
304
304
305 return redirect(url('repo_edit_fields'))
305 return redirect(url('repo_edit_fields'))
306 return render('admin/repos/repo_edit.mako')
306 return render('admin/repos/repo_edit.mako')
307
307
308 @HasRepoPermissionAllDecorator('repository.admin')
308 @HasRepoPermissionAllDecorator('repository.admin')
309 @auth.CSRFRequired()
309 @auth.CSRFRequired()
310 def create_repo_field(self, repo_name):
310 def create_repo_field(self, repo_name):
311 try:
311 try:
312 form_result = RepoFieldForm()().to_python(dict(request.POST))
312 form_result = RepoFieldForm()().to_python(dict(request.POST))
313 RepoModel().add_repo_field(
313 RepoModel().add_repo_field(
314 repo_name, form_result['new_field_key'],
314 repo_name, form_result['new_field_key'],
315 field_type=form_result['new_field_type'],
315 field_type=form_result['new_field_type'],
316 field_value=form_result['new_field_value'],
316 field_value=form_result['new_field_value'],
317 field_label=form_result['new_field_label'],
317 field_label=form_result['new_field_label'],
318 field_desc=form_result['new_field_desc'])
318 field_desc=form_result['new_field_desc'])
319
319
320 Session().commit()
320 Session().commit()
321 except Exception as e:
321 except Exception as e:
322 log.exception("Exception creating field")
322 log.exception("Exception creating field")
323 msg = _('An error occurred during creation of field')
323 msg = _('An error occurred during creation of field')
324 if isinstance(e, formencode.Invalid):
324 if isinstance(e, formencode.Invalid):
325 msg += ". " + e.msg
325 msg += ". " + e.msg
326 h.flash(msg, category='error')
326 h.flash(msg, category='error')
327 return redirect(url('edit_repo_fields', repo_name=repo_name))
327 return redirect(url('edit_repo_fields', repo_name=repo_name))
328
328
329 @HasRepoPermissionAllDecorator('repository.admin')
329 @HasRepoPermissionAllDecorator('repository.admin')
330 @auth.CSRFRequired()
330 @auth.CSRFRequired()
331 def delete_repo_field(self, repo_name, field_id):
331 def delete_repo_field(self, repo_name, field_id):
332 field = RepositoryField.get_or_404(field_id)
332 field = RepositoryField.get_or_404(field_id)
333 try:
333 try:
334 RepoModel().delete_repo_field(repo_name, field.field_key)
334 RepoModel().delete_repo_field(repo_name, field.field_key)
335 Session().commit()
335 Session().commit()
336 except Exception as e:
336 except Exception as e:
337 log.exception("Exception during removal of field")
337 log.exception("Exception during removal of field")
338 msg = _('An error occurred during removal of field')
338 msg = _('An error occurred during removal of field')
339 h.flash(msg, category='error')
339 h.flash(msg, category='error')
340 return redirect(url('edit_repo_fields', repo_name=repo_name))
340 return redirect(url('edit_repo_fields', repo_name=repo_name))
341
341
342 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
342 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
343 @auth.CSRFRequired()
343 @auth.CSRFRequired()
344 def toggle_locking(self, repo_name):
344 def toggle_locking(self, repo_name):
345 """
345 """
346 Toggle the locking state of a repository via a call to this url
346 Toggle the locking state of a repository via a call to this url
347
347
348 :param repo_name:
348 :param repo_name:
349 """
349 """
350
350
351 try:
351 try:
352 repo = Repository.get_by_repo_name(repo_name)
352 repo = Repository.get_by_repo_name(repo_name)
353
353
354 if repo.enable_locking:
354 if repo.enable_locking:
355 if repo.locked[0]:
355 if repo.locked[0]:
356 Repository.unlock(repo)
356 Repository.unlock(repo)
357 action = _('Unlocked')
357 action = _('Unlocked')
358 else:
358 else:
359 Repository.lock(repo, c.rhodecode_user.user_id,
359 Repository.lock(repo, c.rhodecode_user.user_id,
360 lock_reason=Repository.LOCK_WEB)
360 lock_reason=Repository.LOCK_WEB)
361 action = _('Locked')
361 action = _('Locked')
362
362
363 h.flash(_('Repository has been %s') % action,
363 h.flash(_('Repository has been %s') % action,
364 category='success')
364 category='success')
365 except Exception:
365 except Exception:
366 log.exception("Exception during unlocking")
366 log.exception("Exception during unlocking")
367 h.flash(_('An error occurred during unlocking'),
367 h.flash(_('An error occurred during unlocking'),
368 category='error')
368 category='error')
369 return redirect(url('summary_home', repo_name=repo_name))
369 return redirect(h.route_path('repo_summary', repo_name=repo_name))
370
370
371 @HasRepoPermissionAllDecorator('repository.admin')
371 @HasRepoPermissionAllDecorator('repository.admin')
372 @auth.CSRFRequired()
372 @auth.CSRFRequired()
373 def edit_remote(self, repo_name):
373 def edit_remote(self, repo_name):
374 """PUT /{repo_name}/settings/remote: edit the repo remote."""
374 """PUT /{repo_name}/settings/remote: edit the repo remote."""
375 try:
375 try:
376 ScmModel().pull_changes(repo_name, c.rhodecode_user.username)
376 ScmModel().pull_changes(repo_name, c.rhodecode_user.username)
377 h.flash(_('Pulled from remote location'), category='success')
377 h.flash(_('Pulled from remote location'), category='success')
378 except Exception:
378 except Exception:
379 log.exception("Exception during pull from remote")
379 log.exception("Exception during pull from remote")
380 h.flash(_('An error occurred during pull from remote location'),
380 h.flash(_('An error occurred during pull from remote location'),
381 category='error')
381 category='error')
382 return redirect(url('edit_repo_remote', repo_name=c.repo_name))
382 return redirect(url('edit_repo_remote', repo_name=c.repo_name))
383
383
384 @HasRepoPermissionAllDecorator('repository.admin')
384 @HasRepoPermissionAllDecorator('repository.admin')
385 def edit_remote_form(self, repo_name):
385 def edit_remote_form(self, repo_name):
386 """GET /repo_name/settings: Form to edit an existing item"""
386 """GET /repo_name/settings: Form to edit an existing item"""
387 c.repo_info = self._load_repo(repo_name)
387 c.repo_info = self._load_repo(repo_name)
388 c.active = 'remote'
388 c.active = 'remote'
389
389
390 return render('admin/repos/repo_edit.mako')
390 return render('admin/repos/repo_edit.mako')
391
391
392 @HasRepoPermissionAllDecorator('repository.admin')
392 @HasRepoPermissionAllDecorator('repository.admin')
393 @auth.CSRFRequired()
393 @auth.CSRFRequired()
394 def edit_statistics(self, repo_name):
394 def edit_statistics(self, repo_name):
395 """PUT /{repo_name}/settings/statistics: reset the repo statistics."""
395 """PUT /{repo_name}/settings/statistics: reset the repo statistics."""
396 try:
396 try:
397 RepoModel().delete_stats(repo_name)
397 RepoModel().delete_stats(repo_name)
398 Session().commit()
398 Session().commit()
399 except Exception as e:
399 except Exception as e:
400 log.error(traceback.format_exc())
400 log.error(traceback.format_exc())
401 h.flash(_('An error occurred during deletion of repository stats'),
401 h.flash(_('An error occurred during deletion of repository stats'),
402 category='error')
402 category='error')
403 return redirect(url('edit_repo_statistics', repo_name=c.repo_name))
403 return redirect(url('edit_repo_statistics', repo_name=c.repo_name))
404
404
405 @HasRepoPermissionAllDecorator('repository.admin')
405 @HasRepoPermissionAllDecorator('repository.admin')
406 def edit_statistics_form(self, repo_name):
406 def edit_statistics_form(self, repo_name):
407 """GET /repo_name/settings: Form to edit an existing item"""
407 """GET /repo_name/settings: Form to edit an existing item"""
408 c.repo_info = self._load_repo(repo_name)
408 c.repo_info = self._load_repo(repo_name)
409 repo = c.repo_info.scm_instance()
409 repo = c.repo_info.scm_instance()
410
410
411 if c.repo_info.stats:
411 if c.repo_info.stats:
412 # this is the revision we ended up on, so we add +1 to get a count
412 # this is the revision we ended up on, so we add +1 to get a count
413 last_rev = c.repo_info.stats.stat_on_revision + 1
413 last_rev = c.repo_info.stats.stat_on_revision + 1
414 else:
414 else:
415 last_rev = 0
415 last_rev = 0
416 c.stats_revision = last_rev
416 c.stats_revision = last_rev
417
417
418 c.repo_last_rev = repo.count()
418 c.repo_last_rev = repo.count()
419
419
420 if last_rev == 0 or c.repo_last_rev == 0:
420 if last_rev == 0 or c.repo_last_rev == 0:
421 c.stats_percentage = 0
421 c.stats_percentage = 0
422 else:
422 else:
423 c.stats_percentage = '%.2f' % ((float(last_rev) / c.repo_last_rev) * 100)
423 c.stats_percentage = '%.2f' % ((float(last_rev) / c.repo_last_rev) * 100)
424
424
425 c.active = 'statistics'
425 c.active = 'statistics'
426
426
427 return render('admin/repos/repo_edit.mako')
427 return render('admin/repos/repo_edit.mako')
428
428
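# A small sketch of the statistics-progress computation shown above: the stored
# stat_on_revision is the last indexed revision, so +1 turns it into a count,
# and the percentage is guarded against empty repositories. The helper name and
# inputs are illustrative only.
def stats_progress(stat_on_revision, total_commits):
    done = (stat_on_revision + 1) if stat_on_revision is not None else 0
    if done == 0 or total_commits == 0:
        return '0.00'
    return '%.2f' % (float(done) / total_commits * 100)

# e.g. stats_progress(49, 200) -> '25.00'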
429 @HasRepoPermissionAllDecorator('repository.admin')
429 @HasRepoPermissionAllDecorator('repository.admin')
430 @auth.CSRFRequired()
430 @auth.CSRFRequired()
431 def repo_issuetracker_test(self, repo_name):
431 def repo_issuetracker_test(self, repo_name):
432 if request.is_xhr:
432 if request.is_xhr:
433 return h.urlify_commit_message(
433 return h.urlify_commit_message(
434 request.POST.get('test_text', ''),
434 request.POST.get('test_text', ''),
435 repo_name)
435 repo_name)
436 else:
436 else:
437 raise HTTPBadRequest()
437 raise HTTPBadRequest()
438
438
439 @HasRepoPermissionAllDecorator('repository.admin')
439 @HasRepoPermissionAllDecorator('repository.admin')
440 @auth.CSRFRequired()
440 @auth.CSRFRequired()
441 def repo_issuetracker_delete(self, repo_name):
441 def repo_issuetracker_delete(self, repo_name):
442 uid = request.POST.get('uid')
442 uid = request.POST.get('uid')
443 repo_settings = IssueTrackerSettingsModel(repo=repo_name)
443 repo_settings = IssueTrackerSettingsModel(repo=repo_name)
444 try:
444 try:
445 repo_settings.delete_entries(uid)
445 repo_settings.delete_entries(uid)
446 except Exception:
446 except Exception:
447 h.flash(_('Error occurred during deleting issue tracker entry'),
447 h.flash(_('Error occurred during deleting issue tracker entry'),
448 category='error')
448 category='error')
449 else:
449 else:
450 h.flash(_('Removed issue tracker entry'), category='success')
450 h.flash(_('Removed issue tracker entry'), category='success')
451 return redirect(url('repo_settings_issuetracker',
451 return redirect(url('repo_settings_issuetracker',
452 repo_name=repo_name))
452 repo_name=repo_name))
453
453
454 def _update_patterns(self, form, repo_settings):
454 def _update_patterns(self, form, repo_settings):
455 for uid in form['delete_patterns']:
455 for uid in form['delete_patterns']:
456 repo_settings.delete_entries(uid)
456 repo_settings.delete_entries(uid)
457
457
458 for pattern in form['patterns']:
458 for pattern in form['patterns']:
459 for setting, value, type_ in pattern:
459 for setting, value, type_ in pattern:
460 sett = repo_settings.create_or_update_setting(
460 sett = repo_settings.create_or_update_setting(
461 setting, value, type_)
461 setting, value, type_)
462 Session().add(sett)
462 Session().add(sett)
463
463
464 Session().commit()
464 Session().commit()
465
465
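# Illustrative walk-through of the delete-then-upsert flow in _update_patterns
# above, with a plain dict standing in for the issue-tracker settings model:
# deletions requested by the form are applied first, then every remaining
# pattern row is created or updated.
def apply_pattern_form(store, delete_uids, patterns):
    for uid in delete_uids:
        store.pop(uid, None)
    for row in patterns:
        for setting, value, type_ in row:
            store[setting] = (value, type_)
    return store

# e.g. apply_pattern_form({'old': ('x', 'unicode')}, ['old'],
#                         [[('pat1', 'RC-123', 'unicode')]])
# -> {'pat1': ('RC-123', 'unicode')}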
466 @HasRepoPermissionAllDecorator('repository.admin')
466 @HasRepoPermissionAllDecorator('repository.admin')
467 @auth.CSRFRequired()
467 @auth.CSRFRequired()
468 def repo_issuetracker_save(self, repo_name):
468 def repo_issuetracker_save(self, repo_name):
469 # Save inheritance
469 # Save inheritance
470 repo_settings = IssueTrackerSettingsModel(repo=repo_name)
470 repo_settings = IssueTrackerSettingsModel(repo=repo_name)
471 inherited = (request.POST.get('inherit_global_issuetracker')
471 inherited = (request.POST.get('inherit_global_issuetracker')
472 == "inherited")
472 == "inherited")
473 repo_settings.inherit_global_settings = inherited
473 repo_settings.inherit_global_settings = inherited
474 Session().commit()
474 Session().commit()
475
475
476 form = IssueTrackerPatternsForm()().to_python(request.POST)
476 form = IssueTrackerPatternsForm()().to_python(request.POST)
477 if form:
477 if form:
478 self._update_patterns(form, repo_settings)
478 self._update_patterns(form, repo_settings)
479
479
480 h.flash(_('Updated issue tracker entries'), category='success')
480 h.flash(_('Updated issue tracker entries'), category='success')
481 return redirect(url('repo_settings_issuetracker',
481 return redirect(url('repo_settings_issuetracker',
482 repo_name=repo_name))
482 repo_name=repo_name))
483
483
484 @HasRepoPermissionAllDecorator('repository.admin')
484 @HasRepoPermissionAllDecorator('repository.admin')
485 def repo_issuetracker(self, repo_name):
485 def repo_issuetracker(self, repo_name):
486 """GET /admin/settings/issue-tracker: All items in the collection"""
486 """GET /admin/settings/issue-tracker: All items in the collection"""
487 c.active = 'issuetracker'
487 c.active = 'issuetracker'
488 c.data = 'data'
488 c.data = 'data'
489 c.repo_info = self._load_repo(repo_name)
489 c.repo_info = self._load_repo(repo_name)
490
490
491 repo = Repository.get_by_repo_name(repo_name)
491 repo = Repository.get_by_repo_name(repo_name)
492 c.settings_model = IssueTrackerSettingsModel(repo=repo)
492 c.settings_model = IssueTrackerSettingsModel(repo=repo)
493 c.global_patterns = c.settings_model.get_global_settings()
493 c.global_patterns = c.settings_model.get_global_settings()
494 c.repo_patterns = c.settings_model.get_repo_settings()
494 c.repo_patterns = c.settings_model.get_repo_settings()
495
495
496 return render('admin/repos/repo_edit.mako')
496 return render('admin/repos/repo_edit.mako')
497
497
498 @HasRepoPermissionAllDecorator('repository.admin')
498 @HasRepoPermissionAllDecorator('repository.admin')
499 def repo_settings_vcs(self, repo_name):
499 def repo_settings_vcs(self, repo_name):
500 """GET /{repo_name}/settings/vcs/: All items in the collection"""
500 """GET /{repo_name}/settings/vcs/: All items in the collection"""
501
501
502 model = VcsSettingsModel(repo=repo_name)
502 model = VcsSettingsModel(repo=repo_name)
503
503
504 c.active = 'vcs'
504 c.active = 'vcs'
505 c.global_svn_branch_patterns = model.get_global_svn_branch_patterns()
505 c.global_svn_branch_patterns = model.get_global_svn_branch_patterns()
506 c.global_svn_tag_patterns = model.get_global_svn_tag_patterns()
506 c.global_svn_tag_patterns = model.get_global_svn_tag_patterns()
507 c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
507 c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
508 c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
508 c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
509 c.repo_info = self._load_repo(repo_name)
509 c.repo_info = self._load_repo(repo_name)
510 defaults = self._vcs_form_defaults(repo_name)
510 defaults = self._vcs_form_defaults(repo_name)
511 c.inherit_global_settings = defaults['inherit_global_settings']
511 c.inherit_global_settings = defaults['inherit_global_settings']
512 c.labs_active = str2bool(
512 c.labs_active = str2bool(
513 rhodecode.CONFIG.get('labs_settings_active', 'true'))
513 rhodecode.CONFIG.get('labs_settings_active', 'true'))
514
514
515 return htmlfill.render(
515 return htmlfill.render(
516 render('admin/repos/repo_edit.mako'),
516 render('admin/repos/repo_edit.mako'),
517 defaults=defaults,
517 defaults=defaults,
518 encoding="UTF-8",
518 encoding="UTF-8",
519 force_defaults=False)
519 force_defaults=False)
520
520
521 @HasRepoPermissionAllDecorator('repository.admin')
521 @HasRepoPermissionAllDecorator('repository.admin')
522 @auth.CSRFRequired()
522 @auth.CSRFRequired()
523 def repo_settings_vcs_update(self, repo_name):
523 def repo_settings_vcs_update(self, repo_name):
524 """POST /{repo_name}/settings/vcs/: All items in the collection"""
524 """POST /{repo_name}/settings/vcs/: All items in the collection"""
525 c.active = 'vcs'
525 c.active = 'vcs'
526
526
527 model = VcsSettingsModel(repo=repo_name)
527 model = VcsSettingsModel(repo=repo_name)
528 c.global_svn_branch_patterns = model.get_global_svn_branch_patterns()
528 c.global_svn_branch_patterns = model.get_global_svn_branch_patterns()
529 c.global_svn_tag_patterns = model.get_global_svn_tag_patterns()
529 c.global_svn_tag_patterns = model.get_global_svn_tag_patterns()
530 c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
530 c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
531 c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
531 c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
532 c.repo_info = self._load_repo(repo_name)
532 c.repo_info = self._load_repo(repo_name)
533 defaults = self._vcs_form_defaults(repo_name)
533 defaults = self._vcs_form_defaults(repo_name)
534 c.inherit_global_settings = defaults['inherit_global_settings']
534 c.inherit_global_settings = defaults['inherit_global_settings']
535
535
536 application_form = RepoVcsSettingsForm(repo_name)()
536 application_form = RepoVcsSettingsForm(repo_name)()
537 try:
537 try:
538 form_result = application_form.to_python(dict(request.POST))
538 form_result = application_form.to_python(dict(request.POST))
539 except formencode.Invalid as errors:
539 except formencode.Invalid as errors:
540 h.flash(
540 h.flash(
541 _("Some form inputs contain invalid data."),
541 _("Some form inputs contain invalid data."),
542 category='error')
542 category='error')
543 return htmlfill.render(
543 return htmlfill.render(
544 render('admin/repos/repo_edit.mako'),
544 render('admin/repos/repo_edit.mako'),
545 defaults=errors.value,
545 defaults=errors.value,
546 errors=errors.error_dict or {},
546 errors=errors.error_dict or {},
547 prefix_error=False,
547 prefix_error=False,
548 encoding="UTF-8",
548 encoding="UTF-8",
549 force_defaults=False
549 force_defaults=False
550 )
550 )
551
551
552 try:
552 try:
553 inherit_global_settings = form_result['inherit_global_settings']
553 inherit_global_settings = form_result['inherit_global_settings']
554 model.create_or_update_repo_settings(
554 model.create_or_update_repo_settings(
555 form_result, inherit_global_settings=inherit_global_settings)
555 form_result, inherit_global_settings=inherit_global_settings)
556 except Exception:
556 except Exception:
557 log.exception("Exception while updating settings")
557 log.exception("Exception while updating settings")
558 h.flash(
558 h.flash(
559 _('Error occurred during updating repository VCS settings'),
559 _('Error occurred during updating repository VCS settings'),
560 category='error')
560 category='error')
561 else:
561 else:
562 Session().commit()
562 Session().commit()
563 h.flash(_('Updated VCS settings'), category='success')
563 h.flash(_('Updated VCS settings'), category='success')
564 return redirect(url('repo_vcs_settings', repo_name=repo_name))
564 return redirect(url('repo_vcs_settings', repo_name=repo_name))
565
565
566 return htmlfill.render(
566 return htmlfill.render(
567 render('admin/repos/repo_edit.mako'),
567 render('admin/repos/repo_edit.mako'),
568 defaults=self._vcs_form_defaults(repo_name),
568 defaults=self._vcs_form_defaults(repo_name),
569 encoding="UTF-8",
569 encoding="UTF-8",
570 force_defaults=False)
570 force_defaults=False)
571
571
572 @HasRepoPermissionAllDecorator('repository.admin')
572 @HasRepoPermissionAllDecorator('repository.admin')
573 @auth.CSRFRequired()
573 @auth.CSRFRequired()
574 @jsonify
574 @jsonify
575 def repo_delete_svn_pattern(self, repo_name):
575 def repo_delete_svn_pattern(self, repo_name):
576 if not request.is_xhr:
576 if not request.is_xhr:
577 return False
577 return False
578
578
579 delete_pattern_id = request.POST.get('delete_svn_pattern')
579 delete_pattern_id = request.POST.get('delete_svn_pattern')
580 model = VcsSettingsModel(repo=repo_name)
580 model = VcsSettingsModel(repo=repo_name)
581 try:
581 try:
582 model.delete_repo_svn_pattern(delete_pattern_id)
582 model.delete_repo_svn_pattern(delete_pattern_id)
583 except SettingNotFound:
583 except SettingNotFound:
584 raise HTTPBadRequest()
584 raise HTTPBadRequest()
585
585
586 Session().commit()
586 Session().commit()
587 return True
587 return True
588
588
589 def _vcs_form_defaults(self, repo_name):
589 def _vcs_form_defaults(self, repo_name):
590 model = VcsSettingsModel(repo=repo_name)
590 model = VcsSettingsModel(repo=repo_name)
591 global_defaults = model.get_global_settings()
591 global_defaults = model.get_global_settings()
592
592
593 repo_defaults = {}
593 repo_defaults = {}
594 repo_defaults.update(global_defaults)
594 repo_defaults.update(global_defaults)
595 repo_defaults.update(model.get_repo_settings())
595 repo_defaults.update(model.get_repo_settings())
596
596
597 global_defaults = {
597 global_defaults = {
598 '{}_inherited'.format(k): global_defaults[k]
598 '{}_inherited'.format(k): global_defaults[k]
599 for k in global_defaults}
599 for k in global_defaults}
600
600
601 defaults = {
601 defaults = {
602 'inherit_global_settings': model.inherit_global_settings
602 'inherit_global_settings': model.inherit_global_settings
603 }
603 }
604 defaults.update(global_defaults)
604 defaults.update(global_defaults)
605 defaults.update(repo_defaults)
605 defaults.update(repo_defaults)
606 defaults.update({
606 defaults.update({
607 'new_svn_branch': '',
607 'new_svn_branch': '',
608 'new_svn_tag': '',
608 'new_svn_tag': '',
609 })
609 })
610 return defaults
610 return defaults
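# Sketch of the defaults-merging order implemented in _vcs_form_defaults above:
# repo values override global ones, while a parallel '<key>_inherited' snapshot
# of the global values is kept so templates can show what would be inherited.
# Plain dicts stand in for the settings model here; names are illustrative.
def merge_vcs_defaults(global_settings, repo_settings, inherit):
    defaults = {'inherit_global_settings': inherit}
    defaults.update({'%s_inherited' % k: v for k, v in global_settings.items()})
    defaults.update(global_settings)
    defaults.update(repo_settings)
    return defaults

# e.g. merge_vcs_defaults({'hooks': True}, {'hooks': False}, inherit=False)
# -> {'inherit_global_settings': False, 'hooks_inherited': True, 'hooks': False}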
@@ -1,290 +1,260 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 changelog controller for rhodecode
22 changelog controller for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 from pylons import request, url, session, tmpl_context as c
27 from pylons import request, url, session, tmpl_context as c
28 from pylons.controllers.util import redirect
28 from pylons.controllers.util import redirect
29 from pylons.i18n.translation import _
29 from pylons.i18n.translation import _
30 from webob.exc import HTTPNotFound, HTTPBadRequest
30 from webob.exc import HTTPNotFound, HTTPBadRequest
31
31
32 import rhodecode.lib.helpers as h
32 import rhodecode.lib.helpers as h
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, HasRepoPermissionAnyDecorator, XHRRequired)
34 LoginRequired, HasRepoPermissionAnyDecorator, XHRRequired)
35 from rhodecode.lib.base import BaseRepoController, render
35 from rhodecode.lib.base import BaseRepoController, render
36 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.graphmod import _colored, _dagwalker
37 from rhodecode.lib.graphmod import _colored, _dagwalker
38 from rhodecode.lib.helpers import RepoPage
38 from rhodecode.lib.helpers import RepoPage
39 from rhodecode.lib.utils2 import safe_int, safe_str
39 from rhodecode.lib.utils2 import safe_int, safe_str
40 from rhodecode.lib.vcs.exceptions import (
40 from rhodecode.lib.vcs.exceptions import (
41 RepositoryError, CommitDoesNotExistError,
41 RepositoryError, CommitDoesNotExistError,
42 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
42 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46 DEFAULT_CHANGELOG_SIZE = 20
46 DEFAULT_CHANGELOG_SIZE = 20
47
47
48
48
49 def _load_changelog_summary():
50 p = safe_int(request.GET.get('page'), 1)
51 size = safe_int(request.GET.get('size'), 10)
52
53 def url_generator(**kw):
54 return url('summary_home',
55 repo_name=c.rhodecode_db_repo.repo_name, size=size, **kw)
56
57 pre_load = ['author', 'branch', 'date', 'message']
58 try:
59 collection = c.rhodecode_repo.get_commits(pre_load=pre_load)
60 except EmptyRepositoryError:
61 collection = c.rhodecode_repo
62
63 c.repo_commits = RepoPage(
64 collection, page=p, items_per_page=size, url=url_generator)
65 page_ids = [x.raw_id for x in c.repo_commits]
66 c.comments = c.rhodecode_db_repo.get_comments(page_ids)
67 c.statuses = c.rhodecode_db_repo.statuses(page_ids)
68
69
70 class ChangelogController(BaseRepoController):
49 class ChangelogController(BaseRepoController):
71
50
72 def __before__(self):
51 def __before__(self):
73 super(ChangelogController, self).__before__()
52 super(ChangelogController, self).__before__()
74 c.affected_files_cut_off = 60
53 c.affected_files_cut_off = 60
75
54
76 def __get_commit_or_redirect(
55 def __get_commit_or_redirect(
77 self, commit_id, repo, redirect_after=True, partial=False):
56 self, commit_id, repo, redirect_after=True, partial=False):
78 """
57 """
79 This is a safe way to get a commit. If an error occurs it
58 This is a safe way to get a commit. If an error occurs it
80 redirects to a commit with a proper message. If partial is set
59 redirects to a commit with a proper message. If partial is set
81 then it does not redirect but raises an exception instead.
60 then it does not redirect but raises an exception instead.
82
61
83 :param commit_id: commit to fetch
62 :param commit_id: commit to fetch
84 :param repo: repo instance
63 :param repo: repo instance
85 """
64 """
86 try:
65 try:
87 return c.rhodecode_repo.get_commit(commit_id)
66 return c.rhodecode_repo.get_commit(commit_id)
88 except EmptyRepositoryError:
67 except EmptyRepositoryError:
89 if not redirect_after:
68 if not redirect_after:
90 return None
69 return None
91 h.flash(h.literal(_('There are no commits yet')),
70 h.flash(h.literal(_('There are no commits yet')),
92 category='warning')
71 category='warning')
93 redirect(url('changelog_home', repo_name=repo.repo_name))
72 redirect(url('changelog_home', repo_name=repo.repo_name))
94 except RepositoryError as e:
73 except RepositoryError as e:
95 msg = safe_str(e)
74 msg = safe_str(e)
96 log.exception(msg)
75 log.exception(msg)
97 h.flash(msg, category='warning')
76 h.flash(msg, category='warning')
98 if not partial:
77 if not partial:
99 redirect(h.url('changelog_home', repo_name=repo.repo_name))
78 redirect(h.url('changelog_home', repo_name=repo.repo_name))
100 raise HTTPBadRequest()
79 raise HTTPBadRequest()
101
80
102 def _graph(self, repo, commits, prev_data=None, next_data=None):
81 def _graph(self, repo, commits, prev_data=None, next_data=None):
103 """
82 """
104 Generates a DAG graph for repo
83 Generates a DAG graph for repo
105
84
106 :param repo: repo instance
85 :param repo: repo instance
107 :param commits: list of commits
86 :param commits: list of commits
108 """
87 """
109 if not commits:
88 if not commits:
110 return json.dumps([])
89 return json.dumps([])
111
90
112 def serialize(commit, parents=True):
91 def serialize(commit, parents=True):
113 data = dict(
92 data = dict(
114 raw_id=commit.raw_id,
93 raw_id=commit.raw_id,
115 idx=commit.idx,
94 idx=commit.idx,
116 branch=commit.branch,
95 branch=commit.branch,
117 )
96 )
118 if parents:
97 if parents:
119 data['parents'] = [
98 data['parents'] = [
120 serialize(x, parents=False) for x in commit.parents]
99 serialize(x, parents=False) for x in commit.parents]
121 return data
100 return data
122
101
123 prev_data = prev_data or []
102 prev_data = prev_data or []
124 next_data = next_data or []
103 next_data = next_data or []
125
104
126 current = [serialize(x) for x in commits]
105 current = [serialize(x) for x in commits]
127 commits = prev_data + current + next_data
106 commits = prev_data + current + next_data
128
107
129 dag = _dagwalker(repo, commits)
108 dag = _dagwalker(repo, commits)
130
109
131 data = [[commit_id, vtx, edges, branch]
110 data = [[commit_id, vtx, edges, branch]
132 for commit_id, vtx, edges, branch in _colored(dag)]
111 for commit_id, vtx, edges, branch in _colored(dag)]
133 return json.dumps(data), json.dumps(current)
112 return json.dumps(data), json.dumps(current)
134
113
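# Minimal sketch of the one-level-deep commit serialization used by _graph
# above: parents are serialized without their own parents so the JSON payload
# stays small. Plain dicts stand in for the real commit objects here.
def serialize_commit(commit, with_parents=True):
    data = {'raw_id': commit['raw_id'], 'idx': commit['idx'],
            'branch': commit['branch']}
    if with_parents:
        data['parents'] = [serialize_commit(p, with_parents=False)
                           for p in commit.get('parents', [])]
    return data

# e.g. serialize_commit({'raw_id': 'abc', 'idx': 2, 'branch': 'default',
#                        'parents': [{'raw_id': 'def', 'idx': 1,
#                                     'branch': 'default'}]})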
135 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
114 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
136 if branch_name not in c.rhodecode_repo.branches_all:
115 if branch_name not in c.rhodecode_repo.branches_all:
137 h.flash('Branch {} is not found.'.format(branch_name),
116 h.flash('Branch {} is not found.'.format(branch_name),
138 category='warning')
117 category='warning')
139 redirect(url('changelog_file_home', repo_name=repo_name,
118 redirect(url('changelog_file_home', repo_name=repo_name,
140 revision=branch_name, f_path=f_path or ''))
119 revision=branch_name, f_path=f_path or ''))
141
120
142 def _load_changelog_data(self, collection, page, chunk_size, branch_name=None, dynamic=False):
121 def _load_changelog_data(self, collection, page, chunk_size, branch_name=None, dynamic=False):
143 c.total_cs = len(collection)
122 c.total_cs = len(collection)
144 c.showing_commits = min(chunk_size, c.total_cs)
123 c.showing_commits = min(chunk_size, c.total_cs)
145 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
124 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
146 items_per_page=chunk_size, branch=branch_name)
125 items_per_page=chunk_size, branch=branch_name)
147
126
148 c.next_page = c.pagination.next_page
127 c.next_page = c.pagination.next_page
149 c.prev_page = c.pagination.previous_page
128 c.prev_page = c.pagination.previous_page
150
129
151 if dynamic:
130 if dynamic:
152 if request.GET.get('chunk') != 'next':
131 if request.GET.get('chunk') != 'next':
153 c.next_page = None
132 c.next_page = None
154 if request.GET.get('chunk') != 'prev':
133 if request.GET.get('chunk') != 'prev':
155 c.prev_page = None
134 c.prev_page = None
156
135
157 page_commit_ids = [x.raw_id for x in c.pagination]
136 page_commit_ids = [x.raw_id for x in c.pagination]
158 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
137 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
159 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
138 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
160
139
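# Sketch of the 'dynamic' paging rule in _load_changelog_data above: when the
# grid asks for the next chunk only the next page is exposed, and vice versa,
# so the partial response never renders both directions. 'chunk' and the page
# numbers below are illustrative inputs, not RhodeCode objects.
def visible_pages(prev_page, next_page, chunk):
    if chunk != 'next':
        next_page = None
    if chunk != 'prev':
        prev_page = None
    return prev_page, next_page

# e.g. visible_pages(1, 3, 'next') -> (None, 3)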
161 @LoginRequired()
140 @LoginRequired()
162 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
141 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
163 'repository.admin')
142 'repository.admin')
164 def index(self, repo_name, revision=None, f_path=None):
143 def index(self, repo_name, revision=None, f_path=None):
165 commit_id = revision
144 commit_id = revision
166 chunk_size = 20
145 chunk_size = 20
167
146
168 c.branch_name = branch_name = request.GET.get('branch', None)
147 c.branch_name = branch_name = request.GET.get('branch', None)
169 c.book_name = book_name = request.GET.get('bookmark', None)
148 c.book_name = book_name = request.GET.get('bookmark', None)
170 hist_limit = safe_int(request.GET.get('limit')) or None
149 hist_limit = safe_int(request.GET.get('limit')) or None
171
150
172 p = safe_int(request.GET.get('page', 1), 1)
151 p = safe_int(request.GET.get('page', 1), 1)
173
152
174 c.selected_name = branch_name or book_name
153 c.selected_name = branch_name or book_name
175 if not commit_id and branch_name:
154 if not commit_id and branch_name:
176 self._check_if_valid_branch(branch_name, repo_name, f_path)
155 self._check_if_valid_branch(branch_name, repo_name, f_path)
177
156
178 c.changelog_for_path = f_path
157 c.changelog_for_path = f_path
179 pre_load = ['author', 'branch', 'date', 'message', 'parents']
158 pre_load = ['author', 'branch', 'date', 'message', 'parents']
180 commit_ids = []
159 commit_ids = []
181
160
182 try:
161 try:
183 if f_path:
162 if f_path:
184 log.debug('generating changelog for path %s', f_path)
163 log.debug('generating changelog for path %s', f_path)
185 # get the history for the file!
164 # get the history for the file!
186 base_commit = c.rhodecode_repo.get_commit(revision)
165 base_commit = c.rhodecode_repo.get_commit(revision)
187 try:
166 try:
188 collection = base_commit.get_file_history(
167 collection = base_commit.get_file_history(
189 f_path, limit=hist_limit, pre_load=pre_load)
168 f_path, limit=hist_limit, pre_load=pre_load)
190 if (collection
169 if (collection
191 and request.environ.get('HTTP_X_PARTIAL_XHR')):
170 and request.environ.get('HTTP_X_PARTIAL_XHR')):
192 # for an ajax call we remove the first one, since we're looking
171 # for an ajax call we remove the first one, since we're looking
193 # at it right now in the context of a file commit
172 # at it right now in the context of a file commit
194 collection.pop(0)
173 collection.pop(0)
195 except (NodeDoesNotExistError, CommitError):
174 except (NodeDoesNotExistError, CommitError):
196 # this node is not present at tip!
175 # this node is not present at tip!
197 try:
176 try:
198 commit = self.__get_commit_or_redirect(
177 commit = self.__get_commit_or_redirect(
199 commit_id, repo_name)
178 commit_id, repo_name)
200 collection = commit.get_file_history(f_path)
179 collection = commit.get_file_history(f_path)
201 except RepositoryError as e:
180 except RepositoryError as e:
202 h.flash(safe_str(e), category='warning')
181 h.flash(safe_str(e), category='warning')
203 redirect(h.url('changelog_home', repo_name=repo_name))
182 redirect(h.url('changelog_home', repo_name=repo_name))
204 collection = list(reversed(collection))
183 collection = list(reversed(collection))
205 else:
184 else:
206 collection = c.rhodecode_repo.get_commits(
185 collection = c.rhodecode_repo.get_commits(
207 branch_name=branch_name, pre_load=pre_load)
186 branch_name=branch_name, pre_load=pre_load)
208
187
209 self._load_changelog_data(
188 self._load_changelog_data(
210 collection, p, chunk_size, c.branch_name, dynamic=f_path)
189 collection, p, chunk_size, c.branch_name, dynamic=f_path)
211
190
212 except EmptyRepositoryError as e:
191 except EmptyRepositoryError as e:
213 h.flash(safe_str(e), category='warning')
192 h.flash(safe_str(e), category='warning')
214 return redirect(url('summary_home', repo_name=repo_name))
193 return redirect(h.route_path('repo_summary', repo_name=repo_name))
215 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
194 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
216 msg = safe_str(e)
195 msg = safe_str(e)
217 log.exception(msg)
196 log.exception(msg)
218 h.flash(msg, category='error')
197 h.flash(msg, category='error')
219 return redirect(url('changelog_home', repo_name=repo_name))
198 return redirect(url('changelog_home', repo_name=repo_name))
220
199
221 if (request.environ.get('HTTP_X_PARTIAL_XHR')
200 if (request.environ.get('HTTP_X_PARTIAL_XHR')
222 or request.environ.get('HTTP_X_PJAX')):
201 or request.environ.get('HTTP_X_PJAX')):
223 # loading from ajax; we don't want the first result, it has been popped
202 # loading from ajax; we don't want the first result, it has been popped
224 return render('changelog/changelog_file_history.mako')
203 return render('changelog/changelog_file_history.mako')
225
204
226 if not f_path:
205 if not f_path:
227 commit_ids = c.pagination
206 commit_ids = c.pagination
228
207
229 c.graph_data, c.graph_commits = self._graph(
208 c.graph_data, c.graph_commits = self._graph(
230 c.rhodecode_repo, commit_ids)
209 c.rhodecode_repo, commit_ids)
231
210
232 return render('changelog/changelog.mako')
211 return render('changelog/changelog.mako')
233
212
234 @LoginRequired()
213 @LoginRequired()
235 @XHRRequired()
214 @XHRRequired()
236 @HasRepoPermissionAnyDecorator(
215 @HasRepoPermissionAnyDecorator(
237 'repository.read', 'repository.write', 'repository.admin')
216 'repository.read', 'repository.write', 'repository.admin')
238 def changelog_elements(self, repo_name):
217 def changelog_elements(self, repo_name):
239 commit_id = None
218 commit_id = None
240 chunk_size = 20
219 chunk_size = 20
241
220
242 def wrap_for_error(err):
221 def wrap_for_error(err):
243 return '<tr><td colspan="9" class="alert alert-error">ERROR: {}</td></tr>'.format(err)
222 return '<tr><td colspan="9" class="alert alert-error">ERROR: {}</td></tr>'.format(err)
244
223
245 c.branch_name = branch_name = request.GET.get('branch', None)
224 c.branch_name = branch_name = request.GET.get('branch', None)
246 c.book_name = book_name = request.GET.get('bookmark', None)
225 c.book_name = book_name = request.GET.get('bookmark', None)
247
226
248 p = safe_int(request.GET.get('page', 1), 1)
227 p = safe_int(request.GET.get('page', 1), 1)
249
228
250 c.selected_name = branch_name or book_name
229 c.selected_name = branch_name or book_name
251 if not commit_id and branch_name:
230 if not commit_id and branch_name:
252 if branch_name not in c.rhodecode_repo.branches_all:
231 if branch_name not in c.rhodecode_repo.branches_all:
253 return wrap_for_error(
232 return wrap_for_error(
254 safe_str('Missing branch: {}'.format(branch_name)))
233 safe_str('Missing branch: {}'.format(branch_name)))
255
234
256 pre_load = ['author', 'branch', 'date', 'message', 'parents']
235 pre_load = ['author', 'branch', 'date', 'message', 'parents']
257 collection = c.rhodecode_repo.get_commits(
236 collection = c.rhodecode_repo.get_commits(
258 branch_name=branch_name, pre_load=pre_load)
237 branch_name=branch_name, pre_load=pre_load)
259
238
260 try:
239 try:
261 self._load_changelog_data(collection, p, chunk_size, dynamic=True)
240 self._load_changelog_data(collection, p, chunk_size, dynamic=True)
262 except EmptyRepositoryError as e:
241 except EmptyRepositoryError as e:
263 return wrap_for_error(safe_str(e))
242 return wrap_for_error(safe_str(e))
264 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
243 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
265 log.exception('Failed to fetch commits')
244 log.exception('Failed to fetch commits')
266 return wrap_for_error(safe_str(e))
245 return wrap_for_error(safe_str(e))
267
246
268 prev_data = None
247 prev_data = None
269 next_data = None
248 next_data = None
270
249
271 prev_graph = json.loads(request.POST.get('graph', ''))
250 prev_graph = json.loads(request.POST.get('graph', ''))
272
251
273 if request.GET.get('chunk') == 'prev':
252 if request.GET.get('chunk') == 'prev':
274 next_data = prev_graph
253 next_data = prev_graph
275 elif request.GET.get('chunk') == 'next':
254 elif request.GET.get('chunk') == 'next':
276 prev_data = prev_graph
255 prev_data = prev_graph
277
256
278 c.graph_data, c.graph_commits = self._graph(
257 c.graph_data, c.graph_commits = self._graph(
279 c.rhodecode_repo, c.pagination,
258 c.rhodecode_repo, c.pagination,
280 prev_data=prev_data, next_data=next_data)
259 prev_data=prev_data, next_data=next_data)
281 return render('changelog/changelog_elements.mako')
260 return render('changelog/changelog_elements.mako')
282
283 @LoginRequired()
284 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
285 'repository.admin')
286 def changelog_summary(self, repo_name):
287 if request.environ.get('HTTP_X_PJAX'):
288 _load_changelog_summary()
289 return render('changelog/changelog_summary_data.mako')
290 raise HTTPNotFound()
@@ -1,282 +1,282 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Compare controller for showing differences between two commits/refs/tags etc.
22 Compare controller for showing differences between two commits/refs/tags etc.
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 from webob.exc import HTTPBadRequest
27 from webob.exc import HTTPBadRequest
28 from pylons import request, tmpl_context as c, url
28 from pylons import request, tmpl_context as c, url
29 from pylons.controllers.util import redirect
29 from pylons.controllers.util import redirect
30 from pylons.i18n.translation import _
30 from pylons.i18n.translation import _
31
31
32 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
32 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
33 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
34 from rhodecode.lib import diffs, codeblocks
34 from rhodecode.lib import diffs, codeblocks
35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
36 from rhodecode.lib.base import BaseRepoController, render
36 from rhodecode.lib.base import BaseRepoController, render
37 from rhodecode.lib.utils import safe_str
37 from rhodecode.lib.utils import safe_str
38 from rhodecode.lib.utils2 import safe_unicode, str2bool
38 from rhodecode.lib.utils2 import safe_unicode, str2bool
39 from rhodecode.lib.vcs.exceptions import (
39 from rhodecode.lib.vcs.exceptions import (
40 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
40 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
41 NodeDoesNotExistError)
41 NodeDoesNotExistError)
42 from rhodecode.model.db import Repository, ChangesetStatus
42 from rhodecode.model.db import Repository, ChangesetStatus
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class CompareController(BaseRepoController):
47 class CompareController(BaseRepoController):
48
48
49 def __before__(self):
49 def __before__(self):
50 super(CompareController, self).__before__()
50 super(CompareController, self).__before__()
51
51
52 def _get_commit_or_redirect(
52 def _get_commit_or_redirect(
53 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 self, ref, ref_type, repo, redirect_after=True, partial=False):
54 """
54 """
55 This is a safe way to get a commit. If an error occurs it
55 This is a safe way to get a commit. If an error occurs it
56 redirects to a commit with a proper message. If partial is set
56 redirects to a commit with a proper message. If partial is set
57 then it does not redirect but raises an exception instead.
57 then it does not redirect but raises an exception instead.
58 """
58 """
59 try:
59 try:
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
61 except EmptyRepositoryError:
61 except EmptyRepositoryError:
62 if not redirect_after:
62 if not redirect_after:
63 return repo.scm_instance().EMPTY_COMMIT
63 return repo.scm_instance().EMPTY_COMMIT
64 h.flash(h.literal(_('There are no commits yet')),
64 h.flash(h.literal(_('There are no commits yet')),
65 category='warning')
65 category='warning')
66 redirect(url('summary_home', repo_name=repo.repo_name))
66 redirect(h.route_path('repo_summary', repo_name=repo.repo_name))
67
67
68 except RepositoryError as e:
68 except RepositoryError as e:
69 msg = safe_str(e)
69 msg = safe_str(e)
70 log.exception(msg)
70 log.exception(msg)
71 h.flash(msg, category='warning')
71 h.flash(msg, category='warning')
72 if not partial:
72 if not partial:
73 redirect(h.url('summary_home', repo_name=repo.repo_name))
73 redirect(h.route_path('repo_summary', repo_name=repo.repo_name))
74 raise HTTPBadRequest()
74 raise HTTPBadRequest()
75
75
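The helper above encodes a recurring control-flow decision: flash a message and redirect for full page loads, but raise for partial (XHR) requests so the caller can return an error response. A minimal, dependency-free sketch of that pattern; `lookup`, `flash` and `redirect_to_summary` are hypothetical placeholders, not RhodeCode helpers:

    def fetch_or_fail(lookup, flash, redirect_to_summary, partial=False):
        # lookup() stands in for get_commit_from_ref_name(); any error it
        # raises is reported to the user, and only full page loads redirect
        # (the original raises HTTPBadRequest instead of re-raising).
        try:
            return lookup()
        except Exception as exc:
            flash(str(exc))
            if not partial:
                return redirect_to_summary()
            raise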
76 @LoginRequired()
76 @LoginRequired()
77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
78 'repository.admin')
78 'repository.admin')
79 def index(self, repo_name):
79 def index(self, repo_name):
80 c.compare_home = True
80 c.compare_home = True
81 c.commit_ranges = []
81 c.commit_ranges = []
82 c.collapse_all_commits = False
82 c.collapse_all_commits = False
83 c.diffset = None
83 c.diffset = None
84 c.limited_diff = False
84 c.limited_diff = False
85 source_repo = c.rhodecode_db_repo.repo_name
85 source_repo = c.rhodecode_db_repo.repo_name
86 target_repo = request.GET.get('target_repo', source_repo)
86 target_repo = request.GET.get('target_repo', source_repo)
87 c.source_repo = Repository.get_by_repo_name(source_repo)
87 c.source_repo = Repository.get_by_repo_name(source_repo)
88 c.target_repo = Repository.get_by_repo_name(target_repo)
88 c.target_repo = Repository.get_by_repo_name(target_repo)
89 c.source_ref = c.target_ref = _('Select commit')
89 c.source_ref = c.target_ref = _('Select commit')
90 c.source_ref_type = ""
90 c.source_ref_type = ""
91 c.target_ref_type = ""
91 c.target_ref_type = ""
92 c.commit_statuses = ChangesetStatus.STATUSES
92 c.commit_statuses = ChangesetStatus.STATUSES
93 c.preview_mode = False
93 c.preview_mode = False
94 c.file_path = None
94 c.file_path = None
95 return render('compare/compare_diff.mako')
95 return render('compare/compare_diff.mako')
96
96
97 @LoginRequired()
97 @LoginRequired()
98 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
98 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
99 'repository.admin')
99 'repository.admin')
100 def compare(self, repo_name, source_ref_type, source_ref,
100 def compare(self, repo_name, source_ref_type, source_ref,
101 target_ref_type, target_ref):
101 target_ref_type, target_ref):
102 # source_ref will be evaluated in source_repo
102 # source_ref will be evaluated in source_repo
103 source_repo_name = c.rhodecode_db_repo.repo_name
103 source_repo_name = c.rhodecode_db_repo.repo_name
104 source_path, source_id = parse_path_ref(source_ref)
104 source_path, source_id = parse_path_ref(source_ref)
105
105
106 # target_ref will be evaluated in target_repo
106 # target_ref will be evaluated in target_repo
107 target_repo_name = request.GET.get('target_repo', source_repo_name)
107 target_repo_name = request.GET.get('target_repo', source_repo_name)
108 target_path, target_id = parse_path_ref(
108 target_path, target_id = parse_path_ref(
109 target_ref, default_path=request.GET.get('f_path', ''))
109 target_ref, default_path=request.GET.get('f_path', ''))
110
110
111 c.file_path = target_path
111 c.file_path = target_path
112 c.commit_statuses = ChangesetStatus.STATUSES
112 c.commit_statuses = ChangesetStatus.STATUSES
113
113
114 # if merge is True
114 # if merge is True
115 # Show the changes, since the shared ancestor commit of target/source,
115 # Show the changes, since the shared ancestor commit of target/source,
116 # that the source would get if it were merged with target. Only commits
116 # that the source would get if it were merged with target. Only commits
117 # which are in target but not in source will be shown.
117 # which are in target but not in source will be shown.
118 merge = str2bool(request.GET.get('merge'))
118 merge = str2bool(request.GET.get('merge'))
119 # if merge is False
119 # if merge is False
120 # Show a raw diff of source/target refs even if no ancestor exists
120 # Show a raw diff of source/target refs even if no ancestor exists
121
121
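# Toy illustration of the two modes above, with made-up sets of commit ids
# (not real data); merge=True lists only what merging would bring in, while
# merge=False skips ancestry reasoning and just diffs the two refs:
example_source_history = {'a', 'b', 'c'}        # reachable from source_ref
example_target_history = {'a', 'b', 'd', 'e'}   # reachable from target_ref
example_incoming = example_target_history - example_source_history  # {'d', 'e'}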
122 # c.fulldiff disables cut_off_limit
122 # c.fulldiff disables cut_off_limit
123 c.fulldiff = str2bool(request.GET.get('fulldiff'))
123 c.fulldiff = str2bool(request.GET.get('fulldiff'))
124
124
125 # if partial, returns just compare_commits.mako (the commits log)
125 # if partial, returns just compare_commits.mako (the commits log)
126 partial = request.is_xhr
126 partial = request.is_xhr
127
127
128 # swap url for compare_diff page
128 # swap url for compare_diff page
129 c.swap_url = h.url(
129 c.swap_url = h.url(
130 'compare_url',
130 'compare_url',
131 repo_name=target_repo_name,
131 repo_name=target_repo_name,
132 source_ref_type=target_ref_type,
132 source_ref_type=target_ref_type,
133 source_ref=target_ref,
133 source_ref=target_ref,
134 target_repo=source_repo_name,
134 target_repo=source_repo_name,
135 target_ref_type=source_ref_type,
135 target_ref_type=source_ref_type,
136 target_ref=source_ref,
136 target_ref=source_ref,
137 merge=merge and '1' or '',
137 merge=merge and '1' or '',
138 f_path=target_path)
138 f_path=target_path)
139
139
140 source_repo = Repository.get_by_repo_name(source_repo_name)
140 source_repo = Repository.get_by_repo_name(source_repo_name)
141 target_repo = Repository.get_by_repo_name(target_repo_name)
141 target_repo = Repository.get_by_repo_name(target_repo_name)
142
142
143 if source_repo is None:
143 if source_repo is None:
144 msg = _('Could not find the original repo: %(repo)s') % {
144 msg = _('Could not find the original repo: %(repo)s') % {
145 'repo': source_repo_name}
145 'repo': source_repo_name}
146
146
147 log.error(msg)
147 log.error(msg)
148 h.flash(msg, category='error')
148 h.flash(msg, category='error')
149 return redirect(url('compare_home', repo_name=c.repo_name))
149 return redirect(url('compare_home', repo_name=c.repo_name))
150
150
151 if target_repo is None:
151 if target_repo is None:
152 msg = _('Could not find the other repo: %(repo)s') % {
152 msg = _('Could not find the other repo: %(repo)s') % {
153 'repo': target_repo_name}
153 'repo': target_repo_name}
154 log.error(msg)
154 log.error(msg)
155 h.flash(msg, category='error')
155 h.flash(msg, category='error')
156 return redirect(url('compare_home', repo_name=c.repo_name))
156 return redirect(url('compare_home', repo_name=c.repo_name))
157
157
158 source_scm = source_repo.scm_instance()
158 source_scm = source_repo.scm_instance()
159 target_scm = target_repo.scm_instance()
159 target_scm = target_repo.scm_instance()
160
160
161 source_alias = source_scm.alias
161 source_alias = source_scm.alias
162 target_alias = target_scm.alias
162 target_alias = target_scm.alias
163 if source_alias != target_alias:
163 if source_alias != target_alias:
164 msg = _('The comparison of two different kinds of remote repos '
164 msg = _('The comparison of two different kinds of remote repos '
165 'is not available')
165 'is not available')
166 log.error(msg)
166 log.error(msg)
167 h.flash(msg, category='error')
167 h.flash(msg, category='error')
168 return redirect(url('compare_home', repo_name=c.repo_name))
168 return redirect(url('compare_home', repo_name=c.repo_name))
169
169
170 source_commit = self._get_commit_or_redirect(
170 source_commit = self._get_commit_or_redirect(
171 ref=source_id, ref_type=source_ref_type, repo=source_repo,
171 ref=source_id, ref_type=source_ref_type, repo=source_repo,
172 partial=partial)
172 partial=partial)
173 target_commit = self._get_commit_or_redirect(
173 target_commit = self._get_commit_or_redirect(
174 ref=target_id, ref_type=target_ref_type, repo=target_repo,
174 ref=target_id, ref_type=target_ref_type, repo=target_repo,
175 partial=partial)
175 partial=partial)
176
176
177 c.compare_home = False
177 c.compare_home = False
178 c.source_repo = source_repo
178 c.source_repo = source_repo
179 c.target_repo = target_repo
179 c.target_repo = target_repo
180 c.source_ref = source_ref
180 c.source_ref = source_ref
181 c.target_ref = target_ref
181 c.target_ref = target_ref
182 c.source_ref_type = source_ref_type
182 c.source_ref_type = source_ref_type
183 c.target_ref_type = target_ref_type
183 c.target_ref_type = target_ref_type
184
184
185 pre_load = ["author", "branch", "date", "message"]
185 pre_load = ["author", "branch", "date", "message"]
186 c.ancestor = None
186 c.ancestor = None
187
187
188 if c.file_path:
188 if c.file_path:
189 if source_commit == target_commit:
189 if source_commit == target_commit:
190 c.commit_ranges = []
190 c.commit_ranges = []
191 else:
191 else:
192 c.commit_ranges = [target_commit]
192 c.commit_ranges = [target_commit]
193 else:
193 else:
194 try:
194 try:
195 c.commit_ranges = source_scm.compare(
195 c.commit_ranges = source_scm.compare(
196 source_commit.raw_id, target_commit.raw_id,
196 source_commit.raw_id, target_commit.raw_id,
197 target_scm, merge, pre_load=pre_load)
197 target_scm, merge, pre_load=pre_load)
198 if merge:
198 if merge:
199 c.ancestor = source_scm.get_common_ancestor(
199 c.ancestor = source_scm.get_common_ancestor(
200 source_commit.raw_id, target_commit.raw_id, target_scm)
200 source_commit.raw_id, target_commit.raw_id, target_scm)
201 except RepositoryRequirementError:
201 except RepositoryRequirementError:
202 msg = _('Could not compare repos with different '
202 msg = _('Could not compare repos with different '
203 'large file settings')
203 'large file settings')
204 log.error(msg)
204 log.error(msg)
205 if partial:
205 if partial:
206 return msg
206 return msg
207 h.flash(msg, category='error')
207 h.flash(msg, category='error')
208 return redirect(url('compare_home', repo_name=c.repo_name))
208 return redirect(url('compare_home', repo_name=c.repo_name))
209
209
210 c.statuses = c.rhodecode_db_repo.statuses(
210 c.statuses = c.rhodecode_db_repo.statuses(
211 [x.raw_id for x in c.commit_ranges])
211 [x.raw_id for x in c.commit_ranges])
212
212
213 # auto collapse if we have more than limit
213 # auto collapse if we have more than limit
214 collapse_limit = diffs.DiffProcessor._collapse_commits_over
214 collapse_limit = diffs.DiffProcessor._collapse_commits_over
215 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
215 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
216
216
217 if partial: # for PR ajax commits loader
217 if partial: # for PR ajax commits loader
218 if not c.ancestor:
218 if not c.ancestor:
219 return '' # cannot merge if there is no ancestor
219 return '' # cannot merge if there is no ancestor
220 return render('compare/compare_commits.mako')
220 return render('compare/compare_commits.mako')
221
221
222 if c.ancestor:
222 if c.ancestor:
223 # case we want a simple diff without incoming commits,
223 # case we want a simple diff without incoming commits,
224 # previewing what will be merged.
224 # previewing what will be merged.
225 # Make the diff on target repo (which is known to have target_ref)
225 # Make the diff on target repo (which is known to have target_ref)
226 log.debug('Using ancestor %s as source_ref instead of %s'
226 log.debug('Using ancestor %s as source_ref instead of %s'
227 % (c.ancestor, source_ref))
227 % (c.ancestor, source_ref))
228 source_repo = target_repo
228 source_repo = target_repo
229 source_commit = target_repo.get_commit(commit_id=c.ancestor)
229 source_commit = target_repo.get_commit(commit_id=c.ancestor)
230
230
231 # diff_limit cuts off the whole diff once the limit is exceeded;
231 # diff_limit cuts off the whole diff once the limit is exceeded;
232 # file_limit only hides the individual big files from the front-end
232 # file_limit only hides the individual big files from the front-end
233 diff_limit = self.cut_off_limit_diff
233 diff_limit = self.cut_off_limit_diff
234 file_limit = self.cut_off_limit_file
234 file_limit = self.cut_off_limit_file
235
235
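# Sketch of how the two limits differ in effect (byte values are illustrative,
# not the configured cut_off settings): exceeding diff_limit truncates the
# whole diff, exceeding file_limit only collapses that single file in the UI.
def _example_apply_limits(total_diff_bytes, per_file_bytes,
                          diff_limit, file_limit):
    if total_diff_bytes > diff_limit:
        return 'whole diff cut off'
    return ['collapsed' if size > file_limit else 'rendered'
            for size in per_file_bytes]
# _example_apply_limits(10000, [200, 9000], 50000, 5000)
# -> ['rendered', 'collapsed']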
236 log.debug('calculating diff between '
236 log.debug('calculating diff between '
237 'source_ref:%s and target_ref:%s for repo `%s`',
237 'source_ref:%s and target_ref:%s for repo `%s`',
238 source_commit, target_commit,
238 source_commit, target_commit,
239 safe_unicode(source_repo.scm_instance().path))
239 safe_unicode(source_repo.scm_instance().path))
240
240
241 if source_commit.repository != target_commit.repository:
241 if source_commit.repository != target_commit.repository:
242 msg = _(
242 msg = _(
243 "Repositories unrelated. "
243 "Repositories unrelated. "
244 "Cannot compare commit %(commit1)s from repository %(repo1)s "
244 "Cannot compare commit %(commit1)s from repository %(repo1)s "
245 "with commit %(commit2)s from repository %(repo2)s.") % {
245 "with commit %(commit2)s from repository %(repo2)s.") % {
246 'commit1': h.show_id(source_commit),
246 'commit1': h.show_id(source_commit),
247 'repo1': source_repo.repo_name,
247 'repo1': source_repo.repo_name,
248 'commit2': h.show_id(target_commit),
248 'commit2': h.show_id(target_commit),
249 'repo2': target_repo.repo_name,
249 'repo2': target_repo.repo_name,
250 }
250 }
251 h.flash(msg, category='error')
251 h.flash(msg, category='error')
252 raise HTTPBadRequest()
252 raise HTTPBadRequest()
253
253
254 txtdiff = source_repo.scm_instance().get_diff(
254 txtdiff = source_repo.scm_instance().get_diff(
255 commit1=source_commit, commit2=target_commit,
255 commit1=source_commit, commit2=target_commit,
256 path=target_path, path1=source_path)
256 path=target_path, path1=source_path)
257
257
258 diff_processor = diffs.DiffProcessor(
258 diff_processor = diffs.DiffProcessor(
259 txtdiff, format='newdiff', diff_limit=diff_limit,
259 txtdiff, format='newdiff', diff_limit=diff_limit,
260 file_limit=file_limit, show_full_diff=c.fulldiff)
260 file_limit=file_limit, show_full_diff=c.fulldiff)
261 _parsed = diff_processor.prepare()
261 _parsed = diff_processor.prepare()
262
262
263 def _node_getter(commit):
263 def _node_getter(commit):
264 """ Returns a function that returns a node for a commit or None """
264 """ Returns a function that returns a node for a commit or None """
265 def get_node(fname):
265 def get_node(fname):
266 try:
266 try:
267 return commit.get_node(fname)
267 return commit.get_node(fname)
268 except NodeDoesNotExistError:
268 except NodeDoesNotExistError:
269 return None
269 return None
270 return get_node
270 return get_node
271
271
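# Usage sketch for the closure factory above ('setup.py' is only an example
# path); a file missing on either side yields None instead of raising:
example_get_old = _node_getter(source_commit)   # source side of the diff
example_get_new = _node_getter(target_commit)   # target side of the diff
example_old_node = example_get_old('setup.py')  # FileNode or None
example_new_node = example_get_new('setup.py')  # FileNode or None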
272 c.diffset = codeblocks.DiffSet(
272 c.diffset = codeblocks.DiffSet(
273 repo_name=source_repo.repo_name,
273 repo_name=source_repo.repo_name,
274 source_node_getter=_node_getter(source_commit),
274 source_node_getter=_node_getter(source_commit),
275 target_node_getter=_node_getter(target_commit),
275 target_node_getter=_node_getter(target_commit),
276 ).render_patchset(_parsed, source_ref, target_ref)
276 ).render_patchset(_parsed, source_ref, target_ref)
277
277
278 c.preview_mode = merge
278 c.preview_mode = merge
279 c.source_commit = source_commit
279 c.source_commit = source_commit
280 c.target_commit = target_commit
280 c.target_commit = target_commit
281
281
282 return render('compare/compare_diff.mako')
282 return render('compare/compare_diff.mako')
@@ -1,180 +1,179 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Feed controller for RhodeCode
22 Feed controller for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 import pytz
27 import pytz
28 from pylons import url, response, tmpl_context as c
28 from pylons import url, response, tmpl_context as c
29 from pylons.i18n.translation import _
29 from pylons.i18n.translation import _
30
30
31 from beaker.cache import cache_region
31 from beaker.cache import cache_region
32 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
32 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
33
33
34 from rhodecode.model.db import CacheKey, UserApiKeys
34 from rhodecode.model.db import CacheKey, UserApiKeys
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
36 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
37 from rhodecode.lib.base import BaseRepoController
37 from rhodecode.lib.base import BaseRepoController
38 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
38 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
39 from rhodecode.lib.utils2 import safe_int, str2bool
39 from rhodecode.lib.utils2 import safe_int, str2bool
40 from rhodecode.lib.utils import PartialRenderer
40 from rhodecode.lib.utils import PartialRenderer
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 class FeedController(BaseRepoController):
45 class FeedController(BaseRepoController):
46
46
47 def _get_config(self):
47 def _get_config(self):
48 import rhodecode
48 import rhodecode
49 config = rhodecode.CONFIG
49 config = rhodecode.CONFIG
50
50
51 return {
51 return {
52 'language': 'en-us',
52 'language': 'en-us',
53 'feed_ttl': '5', # TTL of feed,
53 'feed_ttl': '5', # TTL of feed,
54 'feed_include_diff':
54 'feed_include_diff':
55 str2bool(config.get('rss_include_diff', False)),
55 str2bool(config.get('rss_include_diff', False)),
56 'feed_items_per_page':
56 'feed_items_per_page':
57 safe_int(config.get('rss_items_per_page', 20)),
57 safe_int(config.get('rss_items_per_page', 20)),
58 'feed_diff_limit':
58 'feed_diff_limit':
59 # we need to protect against parsing huge diffs here, otherwise
59 # we need to protect against parsing huge diffs here, otherwise
60 # we could kill the server
60 # we could kill the server
61 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
61 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
62 }
62 }
63
63
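The values assembled above come straight from the rhodecode .ini configuration; a hedged illustration of the keys being read and the fallbacks used in _get_config (expressed as a plain Python dict because the actual file is an .ini):

    example_ini_values = {
        'rss_include_diff': 'false',          # str2bool -> feed_include_diff
        'rss_items_per_page': '20',           # safe_int -> feed_items_per_page
        'rss_cut_off_limit': str(32 * 1024),  # safe_int -> feed_diff_limit
    }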
64 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
64 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
65 def __before__(self):
65 def __before__(self):
66 super(FeedController, self).__before__()
66 super(FeedController, self).__before__()
67 config = self._get_config()
67 config = self._get_config()
68 # common values for feeds
68 # common values for feeds
69 self.description = _('Changes on %s repository')
69 self.description = _('Changes on %s repository')
70 self.title = _('%s %s feed') % (c.rhodecode_name, '%s')
70 self.title = _('%s %s feed') % (c.rhodecode_name, '%s')
71 self.language = config["language"]
71 self.language = config["language"]
72 self.ttl = config["feed_ttl"]
72 self.ttl = config["feed_ttl"]
73 self.feed_include_diff = config['feed_include_diff']
73 self.feed_include_diff = config['feed_include_diff']
74 self.feed_diff_limit = config['feed_diff_limit']
74 self.feed_diff_limit = config['feed_diff_limit']
75 self.feed_items_per_page = config['feed_items_per_page']
75 self.feed_items_per_page = config['feed_items_per_page']
76
76
77 def __changes(self, commit):
77 def __changes(self, commit):
78 diff_processor = DiffProcessor(
78 diff_processor = DiffProcessor(
79 commit.diff(), diff_limit=self.feed_diff_limit)
79 commit.diff(), diff_limit=self.feed_diff_limit)
80 _parsed = diff_processor.prepare(inline_diff=False)
80 _parsed = diff_processor.prepare(inline_diff=False)
81 limited_diff = isinstance(_parsed, LimitedDiffContainer)
81 limited_diff = isinstance(_parsed, LimitedDiffContainer)
82
82
83 return _parsed, limited_diff
83 return _parsed, limited_diff
84
84
85 def _get_title(self, commit):
85 def _get_title(self, commit):
86 return h.shorter(commit.message, 160)
86 return h.shorter(commit.message, 160)
87
87
88 def _get_description(self, commit):
88 def _get_description(self, commit):
89 _renderer = PartialRenderer('feed/atom_feed_entry.mako')
89 _renderer = PartialRenderer('feed/atom_feed_entry.mako')
90 parsed_diff, limited_diff = self.__changes(commit)
90 parsed_diff, limited_diff = self.__changes(commit)
91 return _renderer(
91 return _renderer(
92 'body',
92 'body',
93 commit=commit,
93 commit=commit,
94 parsed_diff=parsed_diff,
94 parsed_diff=parsed_diff,
95 limited_diff=limited_diff,
95 limited_diff=limited_diff,
96 feed_include_diff=self.feed_include_diff,
96 feed_include_diff=self.feed_include_diff,
97 )
97 )
98
98
99 def _set_timezone(self, date, tzinfo=pytz.utc):
99 def _set_timezone(self, date, tzinfo=pytz.utc):
100 if not getattr(date, "tzinfo", None):
100 if not getattr(date, "tzinfo", None):
101 date = date.replace(tzinfo=tzinfo)
101 date = date.replace(tzinfo=tzinfo)
102 return date
102 return date
103
103
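Since datetime.replace() returns a new object instead of mutating in place, attaching a timezone has to use the return value; a standalone illustration of the conversion _set_timezone performs:

    import datetime
    import pytz

    naive = datetime.datetime(2017, 1, 1, 12, 0, 0)
    aware = naive.replace(tzinfo=pytz.utc) if naive.tzinfo is None else naive
    # 'naive' itself is unchanged; 'aware' carries tzinfo=UTC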
104 def _get_commits(self):
104 def _get_commits(self):
105 return list(c.rhodecode_repo[-self.feed_items_per_page:])
105 return list(c.rhodecode_repo[-self.feed_items_per_page:])
106
106
107 @HasRepoPermissionAnyDecorator(
107 @HasRepoPermissionAnyDecorator(
108 'repository.read', 'repository.write', 'repository.admin')
108 'repository.read', 'repository.write', 'repository.admin')
109 def atom(self, repo_name):
109 def atom(self, repo_name):
110 """Produce an atom-1.0 feed via feedgenerator module"""
110 """Produce an atom-1.0 feed via feedgenerator module"""
111
111
112 @cache_region('long_term')
112 @cache_region('long_term')
113 def _generate_feed(cache_key):
113 def _generate_feed(cache_key):
114 feed = Atom1Feed(
114 feed = Atom1Feed(
115 title=self.title % repo_name,
115 title=self.title % repo_name,
116 link=url('summary_home', repo_name=repo_name, qualified=True),
116 link=h.route_url('repo_summary', repo_name=repo_name),
117 description=self.description % repo_name,
117 description=self.description % repo_name,
118 language=self.language,
118 language=self.language,
119 ttl=self.ttl
119 ttl=self.ttl
120 )
120 )
121
121
122 for commit in reversed(self._get_commits()):
122 for commit in reversed(self._get_commits()):
123 date = self._set_timezone(commit.date)
123 date = self._set_timezone(commit.date)
124 feed.add_item(
124 feed.add_item(
125 title=self._get_title(commit),
125 title=self._get_title(commit),
126 author_name=commit.author,
126 author_name=commit.author,
127 description=self._get_description(commit),
127 description=self._get_description(commit),
128 link=url('changeset_home', repo_name=repo_name,
128 link=url('changeset_home', repo_name=repo_name,
129 revision=commit.raw_id, qualified=True),
129 revision=commit.raw_id, qualified=True),
130 pubdate=date,)
130 pubdate=date,)
131
131
132 return feed.mime_type, feed.writeString('utf-8')
132 return feed.mime_type, feed.writeString('utf-8')
133
133
134 invalidator_context = CacheKey.repo_context_cache(
134 invalidator_context = CacheKey.repo_context_cache(
135 _generate_feed, repo_name, CacheKey.CACHE_TYPE_ATOM)
135 _generate_feed, repo_name, CacheKey.CACHE_TYPE_ATOM)
136
136
137 with invalidator_context as context:
137 with invalidator_context as context:
138 context.invalidate()
138 context.invalidate()
139 mime_type, feed = context.compute()
139 mime_type, feed = context.compute()
140
140
141 response.content_type = mime_type
141 response.content_type = mime_type
142 return feed
142 return feed
143
143
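The caching above layers two mechanisms: Beaker's @cache_region memoizes _generate_feed per cache_key, while CacheKey.repo_context_cache decides when that cached value must be invalidated and recomputed. A condensed, standalone sketch of the Beaker half only, assuming an in-memory 'long_term' region (RhodeCode configures its cache regions from the .ini instead):

    from beaker.cache import cache_region, cache_regions

    cache_regions.update({'long_term': {'type': 'memory', 'expire': 3600}})

    @cache_region('long_term')
    def expensive_feed(cache_key):
        # the argument takes part in the memoization key, so each repository
        # gets its own cached entry
        return 'feed body for %s' % cache_key

    expensive_feed('some-repo')   # computed once
    expensive_feed('some-repo')   # served from the region cache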
144 @HasRepoPermissionAnyDecorator(
144 @HasRepoPermissionAnyDecorator(
145 'repository.read', 'repository.write', 'repository.admin')
145 'repository.read', 'repository.write', 'repository.admin')
146 def rss(self, repo_name):
146 def rss(self, repo_name):
147 """Produce an rss2 feed via feedgenerator module"""
147 """Produce an rss2 feed via feedgenerator module"""
148
148
149 @cache_region('long_term')
149 @cache_region('long_term')
150 def _generate_feed(cache_key):
150 def _generate_feed(cache_key):
151 feed = Rss201rev2Feed(
151 feed = Rss201rev2Feed(
152 title=self.title % repo_name,
152 title=self.title % repo_name,
153 link=url('summary_home', repo_name=repo_name,
153 link=h.route_url('repo_summary', repo_name=repo_name),
154 qualified=True),
155 description=self.description % repo_name,
154 description=self.description % repo_name,
156 language=self.language,
155 language=self.language,
157 ttl=self.ttl
156 ttl=self.ttl
158 )
157 )
159
158
160 for commit in reversed(self._get_commits()):
159 for commit in reversed(self._get_commits()):
161 date = self._set_timezone(commit.date)
160 date = self._set_timezone(commit.date)
162 feed.add_item(
161 feed.add_item(
163 title=self._get_title(commit),
162 title=self._get_title(commit),
164 author_name=commit.author,
163 author_name=commit.author,
165 description=self._get_description(commit),
164 description=self._get_description(commit),
166 link=url('changeset_home', repo_name=repo_name,
165 link=url('changeset_home', repo_name=repo_name,
167 revision=commit.raw_id, qualified=True),
166 revision=commit.raw_id, qualified=True),
168 pubdate=date,)
167 pubdate=date,)
169
168
170 return feed.mime_type, feed.writeString('utf-8')
169 return feed.mime_type, feed.writeString('utf-8')
171
170
172 invalidator_context = CacheKey.repo_context_cache(
171 invalidator_context = CacheKey.repo_context_cache(
173 _generate_feed, repo_name, CacheKey.CACHE_TYPE_RSS)
172 _generate_feed, repo_name, CacheKey.CACHE_TYPE_RSS)
174
173
175 with invalidator_context as context:
174 with invalidator_context as context:
176 context.invalidate()
175 context.invalidate()
177 mime_type, feed = context.compute()
176 mime_type, feed = context.compute()
178
177
179 response.content_type = mime_type
178 response.content_type = mime_type
180 return feed
179 return feed
@@ -1,1110 +1,1110 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Files controller for RhodeCode Enterprise
22 Files controller for RhodeCode Enterprise
23 """
23 """
24
24
25 import itertools
25 import itertools
26 import logging
26 import logging
27 import os
27 import os
28 import shutil
28 import shutil
29 import tempfile
29 import tempfile
30
30
31 from pylons import request, response, tmpl_context as c, url
31 from pylons import request, response, tmpl_context as c, url
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.controllers.util import redirect
33 from pylons.controllers.util import redirect
34 from webob.exc import HTTPNotFound, HTTPBadRequest
34 from webob.exc import HTTPNotFound, HTTPBadRequest
35
35
36 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.controllers.utils import parse_path_ref
37 from rhodecode.lib import diffs, helpers as h, caches
37 from rhodecode.lib import diffs, helpers as h, caches
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.codeblocks import (
39 from rhodecode.lib.codeblocks import (
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 from rhodecode.lib.utils import jsonify, action_logger
41 from rhodecode.lib.utils import jsonify, action_logger
42 from rhodecode.lib.utils2 import (
42 from rhodecode.lib.utils2 import (
43 convert_line_endings, detect_mode, safe_str, str2bool)
43 convert_line_endings, detect_mode, safe_str, str2bool)
44 from rhodecode.lib.auth import (
44 from rhodecode.lib.auth import (
45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
46 from rhodecode.lib.base import BaseRepoController, render
46 from rhodecode.lib.base import BaseRepoController, render
47 from rhodecode.lib.vcs import path as vcspath
47 from rhodecode.lib.vcs import path as vcspath
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 from rhodecode.lib.vcs.conf import settings
49 from rhodecode.lib.vcs.conf import settings
50 from rhodecode.lib.vcs.exceptions import (
50 from rhodecode.lib.vcs.exceptions import (
51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
53 NodeDoesNotExistError, CommitError, NodeError)
53 NodeDoesNotExistError, CommitError, NodeError)
54 from rhodecode.lib.vcs.nodes import FileNode
54 from rhodecode.lib.vcs.nodes import FileNode
55
55
56 from rhodecode.model.repo import RepoModel
56 from rhodecode.model.repo import RepoModel
57 from rhodecode.model.scm import ScmModel
57 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.db import Repository
58 from rhodecode.model.db import Repository
59
59
60 from rhodecode.controllers.changeset import (
60 from rhodecode.controllers.changeset import (
61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
62 from rhodecode.lib.exceptions import NonRelativePathError
62 from rhodecode.lib.exceptions import NonRelativePathError
63
63
64 log = logging.getLogger(__name__)
64 log = logging.getLogger(__name__)
65
65
66
66
67 class FilesController(BaseRepoController):
67 class FilesController(BaseRepoController):
68
68
69 def __before__(self):
69 def __before__(self):
70 super(FilesController, self).__before__()
70 super(FilesController, self).__before__()
71 c.cut_off_limit = self.cut_off_limit_file
71 c.cut_off_limit = self.cut_off_limit_file
72
72
73 def _get_default_encoding(self):
73 def _get_default_encoding(self):
74 enc_list = getattr(c, 'default_encodings', [])
74 enc_list = getattr(c, 'default_encodings', [])
75 return enc_list[0] if enc_list else 'UTF-8'
75 return enc_list[0] if enc_list else 'UTF-8'
76
76
77 def __get_commit_or_redirect(self, commit_id, repo_name,
77 def __get_commit_or_redirect(self, commit_id, repo_name,
78 redirect_after=True):
78 redirect_after=True):
79 """
79 """
80 This is a safe way to get a commit. If an error occurs it redirects to
80 This is a safe way to get a commit. If an error occurs it redirects to
81 the tip with a proper message
81 the tip with a proper message
82
82
83 :param commit_id: id of commit to fetch
83 :param commit_id: id of commit to fetch
84 :param repo_name: repo name to redirect after
84 :param repo_name: repo name to redirect after
85 :param redirect_after: toggle redirection
85 :param redirect_after: toggle redirection
86 """
86 """
87 try:
87 try:
88 return c.rhodecode_repo.get_commit(commit_id)
88 return c.rhodecode_repo.get_commit(commit_id)
89 except EmptyRepositoryError:
89 except EmptyRepositoryError:
90 if not redirect_after:
90 if not redirect_after:
91 return None
91 return None
92 url_ = url('files_add_home',
92 url_ = url('files_add_home',
93 repo_name=c.repo_name,
93 repo_name=c.repo_name,
94 revision=0, f_path='', anchor='edit')
94 revision=0, f_path='', anchor='edit')
95 if h.HasRepoPermissionAny(
95 if h.HasRepoPermissionAny(
96 'repository.write', 'repository.admin')(c.repo_name):
96 'repository.write', 'repository.admin')(c.repo_name):
97 add_new = h.link_to(
97 add_new = h.link_to(
98 _('Click here to add a new file.'),
98 _('Click here to add a new file.'),
99 url_, class_="alert-link")
99 url_, class_="alert-link")
100 else:
100 else:
101 add_new = ""
101 add_new = ""
102 h.flash(h.literal(
102 h.flash(h.literal(
103 _('There are no files yet. %s') % add_new), category='warning')
103 _('There are no files yet. %s') % add_new), category='warning')
104 redirect(h.url('summary_home', repo_name=repo_name))
104 redirect(h.route_path('repo_summary', repo_name=repo_name))
105 except (CommitDoesNotExistError, LookupError):
105 except (CommitDoesNotExistError, LookupError):
106 msg = _('No such commit exists for this repository')
106 msg = _('No such commit exists for this repository')
107 h.flash(msg, category='error')
107 h.flash(msg, category='error')
108 raise HTTPNotFound()
108 raise HTTPNotFound()
109 except RepositoryError as e:
109 except RepositoryError as e:
110 h.flash(safe_str(e), category='error')
110 h.flash(safe_str(e), category='error')
111 raise HTTPNotFound()
111 raise HTTPNotFound()
112
112
113 def __get_filenode_or_redirect(self, repo_name, commit, path):
113 def __get_filenode_or_redirect(self, repo_name, commit, path):
114 """
114 """
115 Returns file_node. If an error occurs, or the given path is a directory,
115 Returns file_node. If an error occurs, or the given path is a directory,
116 it redirects to the top level path
116 it redirects to the top level path
117
117
118 :param repo_name: repo_name
118 :param repo_name: repo_name
119 :param commit: given commit
119 :param commit: given commit
120 :param path: path to lookup
120 :param path: path to lookup
121 """
121 """
122 try:
122 try:
123 file_node = commit.get_node(path)
123 file_node = commit.get_node(path)
124 if file_node.is_dir():
124 if file_node.is_dir():
125 raise RepositoryError('The given path is a directory')
125 raise RepositoryError('The given path is a directory')
126 except CommitDoesNotExistError:
126 except CommitDoesNotExistError:
127 msg = _('No such commit exists for this repository')
127 msg = _('No such commit exists for this repository')
128 log.exception(msg)
128 log.exception(msg)
129 h.flash(msg, category='error')
129 h.flash(msg, category='error')
130 raise HTTPNotFound()
130 raise HTTPNotFound()
131 except RepositoryError as e:
131 except RepositoryError as e:
132 h.flash(safe_str(e), category='error')
132 h.flash(safe_str(e), category='error')
133 raise HTTPNotFound()
133 raise HTTPNotFound()
134
134
135 return file_node
135 return file_node
136
136
137 def __get_tree_cache_manager(self, repo_name, namespace_type):
137 def __get_tree_cache_manager(self, repo_name, namespace_type):
138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
139 return caches.get_cache_manager('repo_cache_long', _namespace)
139 return caches.get_cache_manager('repo_cache_long', _namespace)
140
140
141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
142 full_load=False, force=False):
142 full_load=False, force=False):
143 def _cached_tree():
143 def _cached_tree():
144 log.debug('Generating cached file tree for %s, %s, %s',
144 log.debug('Generating cached file tree for %s, %s, %s',
145 repo_name, commit_id, f_path)
145 repo_name, commit_id, f_path)
146 c.full_load = full_load
146 c.full_load = full_load
147 return render('files/files_browser_tree.mako')
147 return render('files/files_browser_tree.mako')
148
148
149 cache_manager = self.__get_tree_cache_manager(
149 cache_manager = self.__get_tree_cache_manager(
150 repo_name, caches.FILE_TREE)
150 repo_name, caches.FILE_TREE)
151
151
152 cache_key = caches.compute_key_from_params(
152 cache_key = caches.compute_key_from_params(
153 repo_name, commit_id, f_path)
153 repo_name, commit_id, f_path)
154
154
155 if force:
155 if force:
156 # we want to force recompute of caches
156 # we want to force recompute of caches
157 cache_manager.remove_value(cache_key)
157 cache_manager.remove_value(cache_key)
158
158
159 return cache_manager.get(cache_key, createfunc=_cached_tree)
159 return cache_manager.get(cache_key, createfunc=_cached_tree)
160
160
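A minimal sketch of the createfunc idiom used above, with a plain dict standing in for the Beaker cache manager (the helper name and dict are placeholders, not the caches module API); force drops the stale entry first, just like remove_value():

    def example_cached_get(cache, key, createfunc, force=False):
        if force:
            cache.pop(key, None)          # mirrors cache_manager.remove_value
        if key not in cache:
            cache[key] = createfunc()     # mirrors get(..., createfunc=...)
        return cache[key]

    tree_cache = {}
    example_cached_get(tree_cache, ('repo', 'rev', '/'),
                       lambda: 'rendered tree html')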
161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
162 def _cached_nodes():
162 def _cached_nodes():
163 log.debug('Generating cached nodelist for %s, %s, %s',
163 log.debug('Generating cached nodelist for %s, %s, %s',
164 repo_name, commit_id, f_path)
164 repo_name, commit_id, f_path)
165 _d, _f = ScmModel().get_nodes(
165 _d, _f = ScmModel().get_nodes(
166 repo_name, commit_id, f_path, flat=False)
166 repo_name, commit_id, f_path, flat=False)
167 return _d + _f
167 return _d + _f
168
168
169 cache_manager = self.__get_tree_cache_manager(
169 cache_manager = self.__get_tree_cache_manager(
170 repo_name, caches.FILE_SEARCH_TREE_META)
170 repo_name, caches.FILE_SEARCH_TREE_META)
171
171
172 cache_key = caches.compute_key_from_params(
172 cache_key = caches.compute_key_from_params(
173 repo_name, commit_id, f_path)
173 repo_name, commit_id, f_path)
174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
175
175
176 @LoginRequired()
176 @LoginRequired()
177 @HasRepoPermissionAnyDecorator(
177 @HasRepoPermissionAnyDecorator(
178 'repository.read', 'repository.write', 'repository.admin')
178 'repository.read', 'repository.write', 'repository.admin')
179 def index(
179 def index(
180 self, repo_name, revision, f_path, annotate=False, rendered=False):
180 self, repo_name, revision, f_path, annotate=False, rendered=False):
181 commit_id = revision
181 commit_id = revision
182
182
183 # redirect to given commit_id from form if given
183 # redirect to given commit_id from form if given
184 get_commit_id = request.GET.get('at_rev', None)
184 get_commit_id = request.GET.get('at_rev', None)
185 if get_commit_id:
185 if get_commit_id:
186 self.__get_commit_or_redirect(get_commit_id, repo_name)
186 self.__get_commit_or_redirect(get_commit_id, repo_name)
187
187
188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
189 c.branch = request.GET.get('branch', None)
189 c.branch = request.GET.get('branch', None)
190 c.f_path = f_path
190 c.f_path = f_path
191 c.annotate = annotate
191 c.annotate = annotate
192 # default is False, but .rst/.md files are auto-rendered later; we can
192 # default is False, but .rst/.md files are auto-rendered later; we can
193 # override the auto-rendering by setting this GET flag
193 # override the auto-rendering by setting this GET flag
194 c.renderer = rendered or not request.GET.get('no-render', False)
194 c.renderer = rendered or not request.GET.get('no-render', False)
195
195
196 # prev link
196 # prev link
197 try:
197 try:
198 prev_commit = c.commit.prev(c.branch)
198 prev_commit = c.commit.prev(c.branch)
199 c.prev_commit = prev_commit
199 c.prev_commit = prev_commit
200 c.url_prev = url('files_home', repo_name=c.repo_name,
200 c.url_prev = url('files_home', repo_name=c.repo_name,
201 revision=prev_commit.raw_id, f_path=f_path)
201 revision=prev_commit.raw_id, f_path=f_path)
202 if c.branch:
202 if c.branch:
203 c.url_prev += '?branch=%s' % c.branch
203 c.url_prev += '?branch=%s' % c.branch
204 except (CommitDoesNotExistError, VCSError):
204 except (CommitDoesNotExistError, VCSError):
205 c.url_prev = '#'
205 c.url_prev = '#'
206 c.prev_commit = EmptyCommit()
206 c.prev_commit = EmptyCommit()
207
207
208 # next link
208 # next link
209 try:
209 try:
210 next_commit = c.commit.next(c.branch)
210 next_commit = c.commit.next(c.branch)
211 c.next_commit = next_commit
211 c.next_commit = next_commit
212 c.url_next = url('files_home', repo_name=c.repo_name,
212 c.url_next = url('files_home', repo_name=c.repo_name,
213 revision=next_commit.raw_id, f_path=f_path)
213 revision=next_commit.raw_id, f_path=f_path)
214 if c.branch:
214 if c.branch:
215 c.url_next += '?branch=%s' % c.branch
215 c.url_next += '?branch=%s' % c.branch
216 except (CommitDoesNotExistError, VCSError):
216 except (CommitDoesNotExistError, VCSError):
217 c.url_next = '#'
217 c.url_next = '#'
218 c.next_commit = EmptyCommit()
218 c.next_commit = EmptyCommit()
219
219
220 # files or dirs
220 # files or dirs
221 try:
221 try:
222 c.file = c.commit.get_node(f_path)
222 c.file = c.commit.get_node(f_path)
223 c.file_author = True
223 c.file_author = True
224 c.file_tree = ''
224 c.file_tree = ''
225 if c.file.is_file():
225 if c.file.is_file():
226 c.lf_node = c.file.get_largefile_node()
226 c.lf_node = c.file.get_largefile_node()
227
227
228 c.file_source_page = 'true'
228 c.file_source_page = 'true'
229 c.file_last_commit = c.file.last_commit
229 c.file_last_commit = c.file.last_commit
230 if c.file.size < self.cut_off_limit_file:
230 if c.file.size < self.cut_off_limit_file:
231 if c.annotate: # annotation has precedence over renderer
231 if c.annotate: # annotation has precedence over renderer
232 c.annotated_lines = filenode_as_annotated_lines_tokens(
232 c.annotated_lines = filenode_as_annotated_lines_tokens(
233 c.file
233 c.file
234 )
234 )
235 else:
235 else:
236 c.renderer = (
236 c.renderer = (
237 c.renderer and h.renderer_from_filename(c.file.path)
237 c.renderer and h.renderer_from_filename(c.file.path)
238 )
238 )
239 if not c.renderer:
239 if not c.renderer:
240 c.lines = filenode_as_lines_tokens(c.file)
240 c.lines = filenode_as_lines_tokens(c.file)
241
241
242 c.on_branch_head = self._is_valid_head(
242 c.on_branch_head = self._is_valid_head(
243 commit_id, c.rhodecode_repo)
243 commit_id, c.rhodecode_repo)
244
244
245 branch = c.commit.branch if (
245 branch = c.commit.branch if (
246 c.commit.branch and '/' not in c.commit.branch) else None
246 c.commit.branch and '/' not in c.commit.branch) else None
247 c.branch_or_raw_id = branch or c.commit.raw_id
247 c.branch_or_raw_id = branch or c.commit.raw_id
248 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
248 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
249
249
250 author = c.file_last_commit.author
250 author = c.file_last_commit.author
251 c.authors = [(h.email(author),
251 c.authors = [(h.email(author),
252 h.person(author, 'username_or_name_or_email'))]
252 h.person(author, 'username_or_name_or_email'))]
253 else:
253 else:
254 c.file_source_page = 'false'
254 c.file_source_page = 'false'
255 c.authors = []
255 c.authors = []
256 c.file_tree = self._get_tree_at_commit(
256 c.file_tree = self._get_tree_at_commit(
257 repo_name, c.commit.raw_id, f_path)
257 repo_name, c.commit.raw_id, f_path)
258
258
259 except RepositoryError as e:
259 except RepositoryError as e:
260 h.flash(safe_str(e), category='error')
260 h.flash(safe_str(e), category='error')
261 raise HTTPNotFound()
261 raise HTTPNotFound()
262
262
263 if request.environ.get('HTTP_X_PJAX'):
263 if request.environ.get('HTTP_X_PJAX'):
264 return render('files/files_pjax.mako')
264 return render('files/files_pjax.mako')
265
265
266 return render('files/files.mako')
266 return render('files/files.mako')
267
267
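The HTTP_X_PJAX check above is what lets the file browser swap in only a page fragment; a tiny standalone sketch of the same decision against a WSGI-style environ dict (the template names are the ones rendered above):

    def pick_files_template(environ):
        # PJAX requests carry an X-PJAX header, seen as HTTP_X_PJAX in WSGI
        if environ.get('HTTP_X_PJAX'):
            return 'files/files_pjax.mako'
        return 'files/files.mako'

    pick_files_template({'HTTP_X_PJAX': 'true'})  # -> 'files/files_pjax.mako'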
268 @LoginRequired()
268 @LoginRequired()
269 @HasRepoPermissionAnyDecorator(
269 @HasRepoPermissionAnyDecorator(
270 'repository.read', 'repository.write', 'repository.admin')
270 'repository.read', 'repository.write', 'repository.admin')
271 def annotate_previous(self, repo_name, revision, f_path):
271 def annotate_previous(self, repo_name, revision, f_path):
272
272
273 commit_id = revision
273 commit_id = revision
274 commit = self.__get_commit_or_redirect(commit_id, repo_name)
274 commit = self.__get_commit_or_redirect(commit_id, repo_name)
275 prev_commit_id = commit.raw_id
275 prev_commit_id = commit.raw_id
276
276
277 f_path = f_path
277 f_path = f_path
278 is_file = False
278 is_file = False
279 try:
279 try:
280 _file = commit.get_node(f_path)
280 _file = commit.get_node(f_path)
281 is_file = _file.is_file()
281 is_file = _file.is_file()
282 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
282 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
283 pass
283 pass
284
284
285 if is_file:
285 if is_file:
286 history = commit.get_file_history(f_path)
286 history = commit.get_file_history(f_path)
287 prev_commit_id = history[1].raw_id \
287 prev_commit_id = history[1].raw_id \
288 if len(history) > 1 else prev_commit_id
288 if len(history) > 1 else prev_commit_id
289
289
290 return redirect(h.url(
290 return redirect(h.url(
291 'files_annotate_home', repo_name=repo_name,
291 'files_annotate_home', repo_name=repo_name,
292 revision=prev_commit_id, f_path=f_path))
292 revision=prev_commit_id, f_path=f_path))
293
293
294 @LoginRequired()
294 @LoginRequired()
295 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
295 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
296 'repository.admin')
296 'repository.admin')
297 @jsonify
297 @jsonify
298 def history(self, repo_name, revision, f_path):
298 def history(self, repo_name, revision, f_path):
299 commit = self.__get_commit_or_redirect(revision, repo_name)
299 commit = self.__get_commit_or_redirect(revision, repo_name)
300 f_path = f_path
300 f_path = f_path
301 _file = commit.get_node(f_path)
301 _file = commit.get_node(f_path)
302 if _file.is_file():
302 if _file.is_file():
303 file_history, _hist = self._get_node_history(commit, f_path)
303 file_history, _hist = self._get_node_history(commit, f_path)
304
304
305 res = []
305 res = []
306 for obj in file_history:
306 for obj in file_history:
307 res.append({
307 res.append({
308 'text': obj[1],
308 'text': obj[1],
309 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
309 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
310 })
310 })
311
311
312 data = {
312 data = {
313 'more': False,
313 'more': False,
314 'results': res
314 'results': res
315 }
315 }
316 return data
316 return data
317
317
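For reference, the JSON payload assembled above has the following shape (the values here are invented; 'text' carries the group label from obj[1] and 'children' one entry per (id, label) pair):

    example_history_payload = {
        'more': False,
        'results': [
            {'text': 'branches',
             'children': [{'id': 'deadbeefcafe', 'text': 'default'}]},
        ],
    }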
318 @LoginRequired()
318 @LoginRequired()
319 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
319 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
320 'repository.admin')
320 'repository.admin')
321 def authors(self, repo_name, revision, f_path):
321 def authors(self, repo_name, revision, f_path):
322 commit = self.__get_commit_or_redirect(revision, repo_name)
322 commit = self.__get_commit_or_redirect(revision, repo_name)
323 file_node = commit.get_node(f_path)
323 file_node = commit.get_node(f_path)
324 if file_node.is_file():
324 if file_node.is_file():
325 c.file_last_commit = file_node.last_commit
325 c.file_last_commit = file_node.last_commit
326 if request.GET.get('annotate') == '1':
326 if request.GET.get('annotate') == '1':
327 # use _hist from annotation if annotation mode is on
327 # use _hist from annotation if annotation mode is on
328 commit_ids = set(x[1] for x in file_node.annotate)
328 commit_ids = set(x[1] for x in file_node.annotate)
329 _hist = (
329 _hist = (
330 c.rhodecode_repo.get_commit(commit_id)
330 c.rhodecode_repo.get_commit(commit_id)
331 for commit_id in commit_ids)
331 for commit_id in commit_ids)
332 else:
332 else:
333 _f_history, _hist = self._get_node_history(commit, f_path)
333 _f_history, _hist = self._get_node_history(commit, f_path)
334 c.file_author = False
334 c.file_author = False
335 c.authors = []
335 c.authors = []
336 for author in set(commit.author for commit in _hist):
336 for author in set(commit.author for commit in _hist):
337 c.authors.append((
337 c.authors.append((
338 h.email(author),
338 h.email(author),
339 h.person(author, 'username_or_name_or_email')))
339 h.person(author, 'username_or_name_or_email')))
340 return render('files/file_authors_box.mako')
340 return render('files/file_authors_box.mako')
341
341
342 @LoginRequired()
342 @LoginRequired()
343 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
343 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
344 'repository.admin')
344 'repository.admin')
345 def rawfile(self, repo_name, revision, f_path):
345 def rawfile(self, repo_name, revision, f_path):
346 """
346 """
347 Action for download as raw
347 Action for download as raw
348 """
348 """
349 commit = self.__get_commit_or_redirect(revision, repo_name)
349 commit = self.__get_commit_or_redirect(revision, repo_name)
350 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
350 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
351
351
352 if request.GET.get('lf'):
352 if request.GET.get('lf'):
353 # only if lf get flag is passed, we download this file
353 # only if lf get flag is passed, we download this file
354 # as LFS/Largefile
354 # as LFS/Largefile
355 lf_node = file_node.get_largefile_node()
355 lf_node = file_node.get_largefile_node()
356 if lf_node:
356 if lf_node:
357 # overwrite our pointer with the REAL large-file
357 # overwrite our pointer with the REAL large-file
358 file_node = lf_node
358 file_node = lf_node
359
359
360 response.content_disposition = 'attachment; filename=%s' % \
360 response.content_disposition = 'attachment; filename=%s' % \
361 safe_str(f_path.split(Repository.NAME_SEP)[-1])
361 safe_str(f_path.split(Repository.NAME_SEP)[-1])
362
362
363 response.content_type = file_node.mimetype
363 response.content_type = file_node.mimetype
364 charset = self._get_default_encoding()
364 charset = self._get_default_encoding()
365 if charset:
365 if charset:
366 response.charset = charset
366 response.charset = charset
367
367
368 return file_node.content
368 return file_node.content
369
369
370 @LoginRequired()
370 @LoginRequired()
371 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
371 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
372 'repository.admin')
372 'repository.admin')
373 def raw(self, repo_name, revision, f_path):
373 def raw(self, repo_name, revision, f_path):
374 """
374 """
375 Action for show as raw, some mimetypes are "rendered",
375 Action for show as raw, some mimetypes are "rendered",
376 those include images, icons.
376 those include images, icons.
377 """
377 """
378 commit = self.__get_commit_or_redirect(revision, repo_name)
378 commit = self.__get_commit_or_redirect(revision, repo_name)
379 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
379 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
380
380
381 raw_mimetype_mapping = {
381 raw_mimetype_mapping = {
382 # map original mimetype to a mimetype used for "show as raw"
382 # map original mimetype to a mimetype used for "show as raw"
383 # you can also provide a content-disposition to override the
383 # you can also provide a content-disposition to override the
384 # default "attachment" disposition.
384 # default "attachment" disposition.
385 # orig_type: (new_type, new_dispo)
385 # orig_type: (new_type, new_dispo)
386
386
387 # show images inline:
387 # show images inline:
388 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
388 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
389 # for example render an SVG with javascript inside or even render
389 # for example render an SVG with javascript inside or even render
390 # HTML.
390 # HTML.
391 'image/x-icon': ('image/x-icon', 'inline'),
391 'image/x-icon': ('image/x-icon', 'inline'),
392 'image/png': ('image/png', 'inline'),
392 'image/png': ('image/png', 'inline'),
393 'image/gif': ('image/gif', 'inline'),
393 'image/gif': ('image/gif', 'inline'),
394 'image/jpeg': ('image/jpeg', 'inline'),
394 'image/jpeg': ('image/jpeg', 'inline'),
395 'application/pdf': ('application/pdf', 'inline'),
395 'application/pdf': ('application/pdf', 'inline'),
396 }
396 }
397
397
398 mimetype = file_node.mimetype
398 mimetype = file_node.mimetype
399 try:
399 try:
400 mimetype, dispo = raw_mimetype_mapping[mimetype]
400 mimetype, dispo = raw_mimetype_mapping[mimetype]
401 except KeyError:
401 except KeyError:
402 # we don't know anything special about this, handle it safely
402 # we don't know anything special about this, handle it safely
403 if file_node.is_binary:
403 if file_node.is_binary:
404 # do same as download raw for binary files
404 # do same as download raw for binary files
405 mimetype, dispo = 'application/octet-stream', 'attachment'
405 mimetype, dispo = 'application/octet-stream', 'attachment'
406 else:
406 else:
407 # do not just use the original mimetype, but force text/plain,
407 # do not just use the original mimetype, but force text/plain,
408 # otherwise it would serve text/html and that might be unsafe.
408 # otherwise it would serve text/html and that might be unsafe.
409 # Note: underlying vcs library fakes text/plain mimetype if the
409 # Note: underlying vcs library fakes text/plain mimetype if the
410 # mimetype cannot be determined and it thinks it is not
410 # mimetype cannot be determined and it thinks it is not
411 # binary. This might lead to erroneous text display in some
411 # binary. This might lead to erroneous text display in some
412 # cases, but helps in other cases, like with text files
412 # cases, but helps in other cases, like with text files
413 # without extension.
413 # without extension.
414 mimetype, dispo = 'text/plain', 'inline'
414 mimetype, dispo = 'text/plain', 'inline'
415
415
416 if dispo == 'attachment':
416 if dispo == 'attachment':
417 dispo = 'attachment; filename=%s' % safe_str(
417 dispo = 'attachment; filename=%s' % safe_str(
418 f_path.split(os.sep)[-1])
418 f_path.split(os.sep)[-1])
419
419
420 response.content_disposition = dispo
420 response.content_disposition = dispo
421 response.content_type = mimetype
421 response.content_type = mimetype
422 charset = self._get_default_encoding()
422 charset = self._get_default_encoding()
423 if charset:
423 if charset:
424 response.charset = charset
424 response.charset = charset
425 return file_node.content
425 return file_node.content
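The mapping and fallback above are the core of the raw view's safety story. A minimal standalone sketch of the same rules follows, assuming plain Python with no RhodeCode imports; the function name and the is_binary/filename parameters are local to this sketch, not part of the controller API.

RAW_MIMETYPE_MAPPING = {
    # orig_type: (new_type, new_dispo), mirroring the mapping shown above
    'image/x-icon': ('image/x-icon', 'inline'),
    'image/png': ('image/png', 'inline'),
    'image/gif': ('image/gif', 'inline'),
    'image/jpeg': ('image/jpeg', 'inline'),
    'application/pdf': ('application/pdf', 'inline'),
}

def resolve_raw_headers(mimetype, is_binary, filename):
    """Return (content_type, content_disposition) for a raw file response."""
    try:
        new_type, dispo = RAW_MIMETYPE_MAPPING[mimetype]
    except KeyError:
        if is_binary:
            # unknown binary content: force a download
            new_type, dispo = 'application/octet-stream', 'attachment'
        else:
            # unknown text content: never serve it as text/html
            new_type, dispo = 'text/plain', 'inline'
    if dispo == 'attachment':
        dispo = 'attachment; filename=%s' % filename
    return new_type, dispo

# resolve_raw_headers('text/html', False, 'index.html')
# -> ('text/plain', 'inline'), so stored HTML is never interpreted by the browser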
426
426
427 @CSRFRequired()
427 @CSRFRequired()
428 @LoginRequired()
428 @LoginRequired()
429 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
429 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
430 def delete(self, repo_name, revision, f_path):
430 def delete(self, repo_name, revision, f_path):
431 commit_id = revision
431 commit_id = revision
432
432
433 repo = c.rhodecode_db_repo
433 repo = c.rhodecode_db_repo
434 if repo.enable_locking and repo.locked[0]:
434 if repo.enable_locking and repo.locked[0]:
435 h.flash(_('This repository has been locked by %s on %s')
435 h.flash(_('This repository has been locked by %s on %s')
436 % (h.person_by_id(repo.locked[0]),
436 % (h.person_by_id(repo.locked[0]),
437 h.format_date(h.time_to_datetime(repo.locked[1]))),
437 h.format_date(h.time_to_datetime(repo.locked[1]))),
438 'warning')
438 'warning')
439 return redirect(h.url('files_home',
439 return redirect(h.url('files_home',
440 repo_name=repo_name, revision='tip'))
440 repo_name=repo_name, revision='tip'))
441
441
442 if not self._is_valid_head(commit_id, repo.scm_instance()):
442 if not self._is_valid_head(commit_id, repo.scm_instance()):
443 h.flash(_('You can only delete files when the '
443 h.flash(_('You can only delete files when the '
444 'revision is a valid branch'), category='warning')
444 'revision is a valid branch'), category='warning')
445 return redirect(h.url('files_home',
445 return redirect(h.url('files_home',
446 repo_name=repo_name, revision='tip',
446 repo_name=repo_name, revision='tip',
447 f_path=f_path))
447 f_path=f_path))
448
448
449 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
449 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
450 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
450 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
451
451
452 c.default_message = _(
452 c.default_message = _(
453 'Deleted file %s via RhodeCode Enterprise') % (f_path)
453 'Deleted file %s via RhodeCode Enterprise') % (f_path)
454 c.f_path = f_path
454 c.f_path = f_path
455 node_path = f_path
455 node_path = f_path
456 author = c.rhodecode_user.full_contact
456 author = c.rhodecode_user.full_contact
457 message = request.POST.get('message') or c.default_message
457 message = request.POST.get('message') or c.default_message
458 try:
458 try:
459 nodes = {
459 nodes = {
460 node_path: {
460 node_path: {
461 'content': ''
461 'content': ''
462 }
462 }
463 }
463 }
464 self.scm_model.delete_nodes(
464 self.scm_model.delete_nodes(
465 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
465 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
466 message=message,
466 message=message,
467 nodes=nodes,
467 nodes=nodes,
468 parent_commit=c.commit,
468 parent_commit=c.commit,
469 author=author,
469 author=author,
470 )
470 )
471
471
472 h.flash(_('Successfully deleted file %s') % f_path,
472 h.flash(_('Successfully deleted file %s') % f_path,
473 category='success')
473 category='success')
474 except Exception:
474 except Exception:
475 msg = _('Error occurred during commit')
475 msg = _('Error occurred during commit')
476 log.exception(msg)
476 log.exception(msg)
477 h.flash(msg, category='error')
477 h.flash(msg, category='error')
478 return redirect(url('changeset_home',
478 return redirect(url('changeset_home',
479 repo_name=c.repo_name, revision='tip'))
479 repo_name=c.repo_name, revision='tip'))
480
480
481 @LoginRequired()
481 @LoginRequired()
482 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
482 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
483 def delete_home(self, repo_name, revision, f_path):
483 def delete_home(self, repo_name, revision, f_path):
484 commit_id = revision
484 commit_id = revision
485
485
486 repo = c.rhodecode_db_repo
486 repo = c.rhodecode_db_repo
487 if repo.enable_locking and repo.locked[0]:
487 if repo.enable_locking and repo.locked[0]:
488 h.flash(_('This repository has been locked by %s on %s')
488 h.flash(_('This repository has been locked by %s on %s')
489 % (h.person_by_id(repo.locked[0]),
489 % (h.person_by_id(repo.locked[0]),
490 h.format_date(h.time_to_datetime(repo.locked[1]))),
490 h.format_date(h.time_to_datetime(repo.locked[1]))),
491 'warning')
491 'warning')
492 return redirect(h.url('files_home',
492 return redirect(h.url('files_home',
493 repo_name=repo_name, revision='tip'))
493 repo_name=repo_name, revision='tip'))
494
494
495 if not self._is_valid_head(commit_id, repo.scm_instance()):
495 if not self._is_valid_head(commit_id, repo.scm_instance()):
496 h.flash(_('You can only delete files when the '
496 h.flash(_('You can only delete files when the '
497 'revision is a valid branch'), category='warning')
497 'revision is a valid branch'), category='warning')
498 return redirect(h.url('files_home',
498 return redirect(h.url('files_home',
499 repo_name=repo_name, revision='tip',
499 repo_name=repo_name, revision='tip',
500 f_path=f_path))
500 f_path=f_path))
501
501
502 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
502 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
503 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
503 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
504
504
505 c.default_message = _(
505 c.default_message = _(
506 'Deleted file %s via RhodeCode Enterprise') % (f_path)
506 'Deleted file %s via RhodeCode Enterprise') % (f_path)
507 c.f_path = f_path
507 c.f_path = f_path
508
508
509 return render('files/files_delete.mako')
509 return render('files/files_delete.mako')
510
510
511 @CSRFRequired()
511 @CSRFRequired()
512 @LoginRequired()
512 @LoginRequired()
513 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
513 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
514 def edit(self, repo_name, revision, f_path):
514 def edit(self, repo_name, revision, f_path):
515 commit_id = revision
515 commit_id = revision
516
516
517 repo = c.rhodecode_db_repo
517 repo = c.rhodecode_db_repo
518 if repo.enable_locking and repo.locked[0]:
518 if repo.enable_locking and repo.locked[0]:
519 h.flash(_('This repository has been locked by %s on %s')
519 h.flash(_('This repository has been locked by %s on %s')
520 % (h.person_by_id(repo.locked[0]),
520 % (h.person_by_id(repo.locked[0]),
521 h.format_date(h.time_to_datetime(repo.locked[1]))),
521 h.format_date(h.time_to_datetime(repo.locked[1]))),
522 'warning')
522 'warning')
523 return redirect(h.url('files_home',
523 return redirect(h.url('files_home',
524 repo_name=repo_name, revision='tip'))
524 repo_name=repo_name, revision='tip'))
525
525
526 if not self._is_valid_head(commit_id, repo.scm_instance()):
526 if not self._is_valid_head(commit_id, repo.scm_instance()):
527 h.flash(_('You can only edit files when the '
527 h.flash(_('You can only edit files when the '
528 'revision is a valid branch'), category='warning')
528 'revision is a valid branch'), category='warning')
529 return redirect(h.url('files_home',
529 return redirect(h.url('files_home',
530 repo_name=repo_name, revision='tip',
530 repo_name=repo_name, revision='tip',
531 f_path=f_path))
531 f_path=f_path))
532
532
533 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
533 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
534 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
534 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
535
535
536 if c.file.is_binary:
536 if c.file.is_binary:
537 return redirect(url('files_home', repo_name=c.repo_name,
537 return redirect(url('files_home', repo_name=c.repo_name,
538 revision=c.commit.raw_id, f_path=f_path))
538 revision=c.commit.raw_id, f_path=f_path))
539 c.default_message = _(
539 c.default_message = _(
540 'Edited file %s via RhodeCode Enterprise') % (f_path)
540 'Edited file %s via RhodeCode Enterprise') % (f_path)
541 c.f_path = f_path
541 c.f_path = f_path
542 old_content = c.file.content
542 old_content = c.file.content
543 sl = old_content.splitlines(1)
543 sl = old_content.splitlines(1)
544 first_line = sl[0] if sl else ''
544 first_line = sl[0] if sl else ''
545
545
546 # modes: 0 - Unix, 1 - Mac, 2 - DOS
546 # modes: 0 - Unix, 1 - Mac, 2 - DOS
547 mode = detect_mode(first_line, 0)
547 mode = detect_mode(first_line, 0)
548 content = convert_line_endings(request.POST.get('content', ''), mode)
548 content = convert_line_endings(request.POST.get('content', ''), mode)
549
549
550 message = request.POST.get('message') or c.default_message
550 message = request.POST.get('message') or c.default_message
551 org_f_path = c.file.unicode_path
551 org_f_path = c.file.unicode_path
552 filename = request.POST['filename']
552 filename = request.POST['filename']
553 org_filename = c.file.name
553 org_filename = c.file.name
554
554
555 if content == old_content and filename == org_filename:
555 if content == old_content and filename == org_filename:
556 h.flash(_('No changes'), category='warning')
556 h.flash(_('No changes'), category='warning')
557 return redirect(url('changeset_home', repo_name=c.repo_name,
557 return redirect(url('changeset_home', repo_name=c.repo_name,
558 revision='tip'))
558 revision='tip'))
559 try:
559 try:
560 mapping = {
560 mapping = {
561 org_f_path: {
561 org_f_path: {
562 'org_filename': org_f_path,
562 'org_filename': org_f_path,
563 'filename': os.path.join(c.file.dir_path, filename),
563 'filename': os.path.join(c.file.dir_path, filename),
564 'content': content,
564 'content': content,
565 'lexer': '',
565 'lexer': '',
566 'op': 'mod',
566 'op': 'mod',
567 }
567 }
568 }
568 }
569
569
570 ScmModel().update_nodes(
570 ScmModel().update_nodes(
571 user=c.rhodecode_user.user_id,
571 user=c.rhodecode_user.user_id,
572 repo=c.rhodecode_db_repo,
572 repo=c.rhodecode_db_repo,
573 message=message,
573 message=message,
574 nodes=mapping,
574 nodes=mapping,
575 parent_commit=c.commit,
575 parent_commit=c.commit,
576 )
576 )
577
577
578 h.flash(_('Successfully committed to %s') % f_path,
578 h.flash(_('Successfully committed to %s') % f_path,
579 category='success')
579 category='success')
580 except Exception:
580 except Exception:
581 msg = _('Error occurred during commit')
581 msg = _('Error occurred during commit')
582 log.exception(msg)
582 log.exception(msg)
583 h.flash(msg, category='error')
583 h.flash(msg, category='error')
584 return redirect(url('changeset_home',
584 return redirect(url('changeset_home',
585 repo_name=c.repo_name, revision='tip'))
585 repo_name=c.repo_name, revision='tip'))
586
586
587 @LoginRequired()
587 @LoginRequired()
588 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
588 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
589 def edit_home(self, repo_name, revision, f_path):
589 def edit_home(self, repo_name, revision, f_path):
590 commit_id = revision
590 commit_id = revision
591
591
592 repo = c.rhodecode_db_repo
592 repo = c.rhodecode_db_repo
593 if repo.enable_locking and repo.locked[0]:
593 if repo.enable_locking and repo.locked[0]:
594 h.flash(_('This repository has been locked by %s on %s')
594 h.flash(_('This repository has been locked by %s on %s')
595 % (h.person_by_id(repo.locked[0]),
595 % (h.person_by_id(repo.locked[0]),
596 h.format_date(h.time_to_datetime(repo.locked[1]))),
596 h.format_date(h.time_to_datetime(repo.locked[1]))),
597 'warning')
597 'warning')
598 return redirect(h.url('files_home',
598 return redirect(h.url('files_home',
599 repo_name=repo_name, revision='tip'))
599 repo_name=repo_name, revision='tip'))
600
600
601 if not self._is_valid_head(commit_id, repo.scm_instance()):
601 if not self._is_valid_head(commit_id, repo.scm_instance()):
602 h.flash(_('You can only edit files when the '
602 h.flash(_('You can only edit files when the '
603 'revision is a valid branch'), category='warning')
603 'revision is a valid branch'), category='warning')
604 return redirect(h.url('files_home',
604 return redirect(h.url('files_home',
605 repo_name=repo_name, revision='tip',
605 repo_name=repo_name, revision='tip',
606 f_path=f_path))
606 f_path=f_path))
607
607
608 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
608 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
609 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
609 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
610
610
611 if c.file.is_binary:
611 if c.file.is_binary:
612 return redirect(url('files_home', repo_name=c.repo_name,
612 return redirect(url('files_home', repo_name=c.repo_name,
613 revision=c.commit.raw_id, f_path=f_path))
613 revision=c.commit.raw_id, f_path=f_path))
614 c.default_message = _(
614 c.default_message = _(
615 'Edited file %s via RhodeCode Enterprise') % (f_path)
615 'Edited file %s via RhodeCode Enterprise') % (f_path)
616 c.f_path = f_path
616 c.f_path = f_path
617
617
618 return render('files/files_edit.mako')
618 return render('files/files_edit.mako')
619
619
620 def _is_valid_head(self, commit_id, repo):
620 def _is_valid_head(self, commit_id, repo):
621 # check if commit is a branch identifier - basically we cannot
621 # check if commit is a branch identifier - basically we cannot
622 # create multiple heads via file editing
622 # create multiple heads via file editing
623 valid_heads = repo.branches.keys() + repo.branches.values()
623 valid_heads = repo.branches.keys() + repo.branches.values()
624
624
625 if h.is_svn(repo) and not repo.is_empty():
625 if h.is_svn(repo) and not repo.is_empty():
626 # Note: Subversion only has one head, we add it here in case there
626 # Note: Subversion only has one head, we add it here in case there
627 # is no branch matched.
627 # is no branch matched.
628 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
628 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
629
629
630 # check if commit is a branch name or branch hash
630 # check if commit is a branch name or branch hash
631 return commit_id in valid_heads
631 return commit_id in valid_heads
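For reference, a toy version of the head check above, assuming plain dicts instead of the vcs backend objects; `branches` maps branch name to head commit id, and `svn_tip_id` stands in for the Subversion special case.

def is_valid_head(commit_id, branches, svn_tip_id=None):
    # a commit is an acceptable edit target if it names a branch or is the
    # current head hash of a branch (no new heads via file editing)
    valid_heads = list(branches.keys()) + list(branches.values())
    if svn_tip_id is not None:
        # Subversion only has one head; accept its latest revision as well
        valid_heads.append(svn_tip_id)
    return commit_id in valid_heads

# is_valid_head('default', {'default': 'abc123'})   -> True
# is_valid_head('abc123', {'default': 'abc123'})    -> True
# is_valid_head('deadbeef', {'default': 'abc123'})  -> False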
632
632
633 @CSRFRequired()
633 @CSRFRequired()
634 @LoginRequired()
634 @LoginRequired()
635 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
635 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
636 def add(self, repo_name, revision, f_path):
636 def add(self, repo_name, revision, f_path):
637 repo = Repository.get_by_repo_name(repo_name)
637 repo = Repository.get_by_repo_name(repo_name)
638 if repo.enable_locking and repo.locked[0]:
638 if repo.enable_locking and repo.locked[0]:
639 h.flash(_('This repository has been locked by %s on %s')
639 h.flash(_('This repository has been locked by %s on %s')
640 % (h.person_by_id(repo.locked[0]),
640 % (h.person_by_id(repo.locked[0]),
641 h.format_date(h.time_to_datetime(repo.locked[1]))),
641 h.format_date(h.time_to_datetime(repo.locked[1]))),
642 'warning')
642 'warning')
643 return redirect(h.url('files_home',
643 return redirect(h.url('files_home',
644 repo_name=repo_name, revision='tip'))
644 repo_name=repo_name, revision='tip'))
645
645
646 r_post = request.POST
646 r_post = request.POST
647
647
648 c.commit = self.__get_commit_or_redirect(
648 c.commit = self.__get_commit_or_redirect(
649 revision, repo_name, redirect_after=False)
649 revision, repo_name, redirect_after=False)
650 if c.commit is None:
650 if c.commit is None:
651 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
651 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
652 c.default_message = (_('Added file via RhodeCode Enterprise'))
652 c.default_message = (_('Added file via RhodeCode Enterprise'))
653 c.f_path = f_path
653 c.f_path = f_path
654 unix_mode = 0
654 unix_mode = 0
655 content = convert_line_endings(r_post.get('content', ''), unix_mode)
655 content = convert_line_endings(r_post.get('content', ''), unix_mode)
656
656
657 message = r_post.get('message') or c.default_message
657 message = r_post.get('message') or c.default_message
658 filename = r_post.get('filename')
658 filename = r_post.get('filename')
659 location = r_post.get('location', '') # dir location
659 location = r_post.get('location', '') # dir location
660 file_obj = r_post.get('upload_file', None)
660 file_obj = r_post.get('upload_file', None)
661
661
662 if file_obj is not None and hasattr(file_obj, 'filename'):
662 if file_obj is not None and hasattr(file_obj, 'filename'):
663 filename = r_post.get('filename_upload')
663 filename = r_post.get('filename_upload')
664 content = file_obj.file
664 content = file_obj.file
665
665
666 if hasattr(content, 'file'):
666 if hasattr(content, 'file'):
667 # non-POSIX systems store the real file under the file attr
667 # non-POSIX systems store the real file under the file attr
668 content = content.file
668 content = content.file
669
669
670 # If there's no commit, redirect to repo summary
670 # If there's no commit, redirect to repo summary
671 if type(c.commit) is EmptyCommit:
671 if type(c.commit) is EmptyCommit:
672 redirect_url = "summary_home"
672 redirect_url = h.route_path('repo_summary', repo_name=c.repo_name)
673 else:
673 else:
674 redirect_url = "changeset_home"
674 redirect_url = url("changeset_home", repo_name=c.repo_name,
675 revision='tip')
675
676
676 if not filename:
677 if not filename:
677 h.flash(_('No filename'), category='warning')
678 h.flash(_('No filename'), category='warning')
678 return redirect(url(redirect_url, repo_name=c.repo_name,
679 return redirect(redirect_url)
679 revision='tip'))
680
680
681 # extract the location from filename,
681 # extract the location from filename,
682 # allows using foo/bar.txt syntax to create subdirectories
682 # allows using foo/bar.txt syntax to create subdirectories
683 subdir_loc = filename.rsplit('/', 1)
683 subdir_loc = filename.rsplit('/', 1)
684 if len(subdir_loc) == 2:
684 if len(subdir_loc) == 2:
685 location = os.path.join(location, subdir_loc[0])
685 location = os.path.join(location, subdir_loc[0])
686
686
687 # strip all crap out of file, just leave the basename
687 # strip all crap out of file, just leave the basename
688 filename = os.path.basename(filename)
688 filename = os.path.basename(filename)
689 node_path = os.path.join(location, filename)
689 node_path = os.path.join(location, filename)
690 author = c.rhodecode_user.full_contact
690 author = c.rhodecode_user.full_contact
691
691
692 try:
692 try:
693 nodes = {
693 nodes = {
694 node_path: {
694 node_path: {
695 'content': content
695 'content': content
696 }
696 }
697 }
697 }
698 self.scm_model.create_nodes(
698 self.scm_model.create_nodes(
699 user=c.rhodecode_user.user_id,
699 user=c.rhodecode_user.user_id,
700 repo=c.rhodecode_db_repo,
700 repo=c.rhodecode_db_repo,
701 message=message,
701 message=message,
702 nodes=nodes,
702 nodes=nodes,
703 parent_commit=c.commit,
703 parent_commit=c.commit,
704 author=author,
704 author=author,
705 )
705 )
706
706
707 h.flash(_('Successfully committed to %s') % node_path,
707 h.flash(_('Successfully committed to %s') % node_path,
708 category='success')
708 category='success')
709 except NonRelativePathError as e:
709 except NonRelativePathError as e:
710 h.flash(_(
710 h.flash(_(
711 'The location specified must be a relative path and must not '
711 'The location specified must be a relative path and must not '
712 'contain .. in the path'), category='warning')
712 'contain .. in the path'), category='warning')
713 return redirect(url('changeset_home', repo_name=c.repo_name,
713 return redirect(url('changeset_home', repo_name=c.repo_name,
714 revision='tip'))
714 revision='tip'))
715 except (NodeError, NodeAlreadyExistsError) as e:
715 except (NodeError, NodeAlreadyExistsError) as e:
716 h.flash(_(e), category='error')
716 h.flash(_(e), category='error')
717 except Exception:
717 except Exception:
718 msg = _('Error occurred during commit')
718 msg = _('Error occurred during commit')
719 log.exception(msg)
719 log.exception(msg)
720 h.flash(msg, category='error')
720 h.flash(msg, category='error')
721 return redirect(url('changeset_home',
721 return redirect(url('changeset_home',
722 repo_name=c.repo_name, revision='tip'))
722 repo_name=c.repo_name, revision='tip'))
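The filename handling in add() above (the foo/bar.txt convention for creating subdirectories) can be illustrated with a small stdlib-only sketch; the helper name is made up for this example and the results shown assume a POSIX path separator.

import os

def normalize_new_file_path(location, filename):
    # allow foo/bar.txt syntax: the directory part is appended to location
    subdir_loc = filename.rsplit('/', 1)
    if len(subdir_loc) == 2:
        location = os.path.join(location, subdir_loc[0])
    # keep only the basename, dropping any remaining path components
    filename = os.path.basename(filename)
    return os.path.join(location, filename)

# normalize_new_file_path('', 'docs/readme.rst')  -> 'docs/readme.rst'
# normalize_new_file_path('src', 'pkg/mod.py')    -> 'src/pkg/mod.py'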
723
723
724 @LoginRequired()
724 @LoginRequired()
725 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
725 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
726 def add_home(self, repo_name, revision, f_path):
726 def add_home(self, repo_name, revision, f_path):
727
727
728 repo = Repository.get_by_repo_name(repo_name)
728 repo = Repository.get_by_repo_name(repo_name)
729 if repo.enable_locking and repo.locked[0]:
729 if repo.enable_locking and repo.locked[0]:
730 h.flash(_('This repository has been locked by %s on %s')
730 h.flash(_('This repository has been locked by %s on %s')
731 % (h.person_by_id(repo.locked[0]),
731 % (h.person_by_id(repo.locked[0]),
732 h.format_date(h.time_to_datetime(repo.locked[1]))),
732 h.format_date(h.time_to_datetime(repo.locked[1]))),
733 'warning')
733 'warning')
734 return redirect(h.url('files_home',
734 return redirect(h.url('files_home',
735 repo_name=repo_name, revision='tip'))
735 repo_name=repo_name, revision='tip'))
736
736
737 c.commit = self.__get_commit_or_redirect(
737 c.commit = self.__get_commit_or_redirect(
738 revision, repo_name, redirect_after=False)
738 revision, repo_name, redirect_after=False)
739 if c.commit is None:
739 if c.commit is None:
740 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
740 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
741 c.default_message = (_('Added file via RhodeCode Enterprise'))
741 c.default_message = (_('Added file via RhodeCode Enterprise'))
742 c.f_path = f_path
742 c.f_path = f_path
743
743
744 return render('files/files_add.mako')
744 return render('files/files_add.mako')
745
745
746 @LoginRequired()
746 @LoginRequired()
747 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
747 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
748 'repository.admin')
748 'repository.admin')
749 def archivefile(self, repo_name, fname):
749 def archivefile(self, repo_name, fname):
750 fileformat = None
750 fileformat = None
751 commit_id = None
751 commit_id = None
752 ext = None
752 ext = None
753 subrepos = request.GET.get('subrepos') == 'true'
753 subrepos = request.GET.get('subrepos') == 'true'
754
754
755 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
755 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
756 archive_spec = fname.split(ext_data[1])
756 archive_spec = fname.split(ext_data[1])
757 if len(archive_spec) == 2 and archive_spec[1] == '':
757 if len(archive_spec) == 2 and archive_spec[1] == '':
758 fileformat = a_type or ext_data[1]
758 fileformat = a_type or ext_data[1]
759 commit_id = archive_spec[0]
759 commit_id = archive_spec[0]
760 ext = ext_data[1]
760 ext = ext_data[1]
761
761
762 dbrepo = RepoModel().get_by_repo_name(repo_name)
762 dbrepo = RepoModel().get_by_repo_name(repo_name)
763 if not dbrepo.enable_downloads:
763 if not dbrepo.enable_downloads:
764 return _('Downloads disabled')
764 return _('Downloads disabled')
765
765
766 try:
766 try:
767 commit = c.rhodecode_repo.get_commit(commit_id)
767 commit = c.rhodecode_repo.get_commit(commit_id)
768 content_type = settings.ARCHIVE_SPECS[fileformat][0]
768 content_type = settings.ARCHIVE_SPECS[fileformat][0]
769 except CommitDoesNotExistError:
769 except CommitDoesNotExistError:
770 return _('Unknown revision %s') % commit_id
770 return _('Unknown revision %s') % commit_id
771 except EmptyRepositoryError:
771 except EmptyRepositoryError:
772 return _('Empty repository')
772 return _('Empty repository')
773 except KeyError:
773 except KeyError:
774 return _('Unknown archive type')
774 return _('Unknown archive type')
775
775
776 # archive cache
776 # archive cache
777 from rhodecode import CONFIG
777 from rhodecode import CONFIG
778
778
779 archive_name = '%s-%s%s%s' % (
779 archive_name = '%s-%s%s%s' % (
780 safe_str(repo_name.replace('/', '_')),
780 safe_str(repo_name.replace('/', '_')),
781 '-sub' if subrepos else '',
781 '-sub' if subrepos else '',
782 safe_str(commit.short_id), ext)
782 safe_str(commit.short_id), ext)
783
783
784 use_cached_archive = False
784 use_cached_archive = False
785 archive_cache_enabled = CONFIG.get(
785 archive_cache_enabled = CONFIG.get(
786 'archive_cache_dir') and not request.GET.get('no_cache')
786 'archive_cache_dir') and not request.GET.get('no_cache')
787
787
788 if archive_cache_enabled:
788 if archive_cache_enabled:
789 # check if it's ok to write
789 # check if it's ok to write
790 if not os.path.isdir(CONFIG['archive_cache_dir']):
790 if not os.path.isdir(CONFIG['archive_cache_dir']):
791 os.makedirs(CONFIG['archive_cache_dir'])
791 os.makedirs(CONFIG['archive_cache_dir'])
792 cached_archive_path = os.path.join(
792 cached_archive_path = os.path.join(
793 CONFIG['archive_cache_dir'], archive_name)
793 CONFIG['archive_cache_dir'], archive_name)
794 if os.path.isfile(cached_archive_path):
794 if os.path.isfile(cached_archive_path):
795 log.debug('Found cached archive in %s', cached_archive_path)
795 log.debug('Found cached archive in %s', cached_archive_path)
796 fd, archive = None, cached_archive_path
796 fd, archive = None, cached_archive_path
797 use_cached_archive = True
797 use_cached_archive = True
798 else:
798 else:
799 log.debug('Archive %s is not yet cached', archive_name)
799 log.debug('Archive %s is not yet cached', archive_name)
800
800
801 if not use_cached_archive:
801 if not use_cached_archive:
802 # generate new archive
802 # generate new archive
803 fd, archive = tempfile.mkstemp()
803 fd, archive = tempfile.mkstemp()
804 log.debug('Creating new temp archive in %s' % (archive,))
804 log.debug('Creating new temp archive in %s' % (archive,))
805 try:
805 try:
806 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
806 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
807 except ImproperArchiveTypeError:
807 except ImproperArchiveTypeError:
808 return _('Unknown archive type')
808 return _('Unknown archive type')
809 if archive_cache_enabled:
809 if archive_cache_enabled:
810 # if we generated the archive and we have cache enabled
810 # if we generated the archive and we have cache enabled
811 # let's keep it for future requests
811 # let's keep it for future requests
812 log.debug('Storing new archive in %s' % (cached_archive_path,))
812 log.debug('Storing new archive in %s' % (cached_archive_path,))
813 shutil.move(archive, cached_archive_path)
813 shutil.move(archive, cached_archive_path)
814 archive = cached_archive_path
814 archive = cached_archive_path
815
815
816 # store download action
816 # store download action
817 audit_logger.store(
817 audit_logger.store(
818 action='repo.archive.download',
818 action='repo.archive.download',
819 action_data={'user_agent': request.user_agent,
819 action_data={'user_agent': request.user_agent,
820 'archive_name': archive_name,
820 'archive_name': archive_name,
821 'archive_spec': fname,
821 'archive_spec': fname,
822 'archive_cached': use_cached_archive},
822 'archive_cached': use_cached_archive},
823 user=c.rhodecode_user,
823 user=c.rhodecode_user,
824 repo=dbrepo,
824 repo=dbrepo,
825 commit=True
825 commit=True
826 )
826 )
827
827
828 response.content_disposition = str(
828 response.content_disposition = str(
829 'attachment; filename=%s' % archive_name)
829 'attachment; filename=%s' % archive_name)
830 response.content_type = str(content_type)
830 response.content_type = str(content_type)
831
831
832 def get_chunked_archive(archive):
832 def get_chunked_archive(archive):
833 with open(archive, 'rb') as stream:
833 with open(archive, 'rb') as stream:
834 while True:
834 while True:
835 data = stream.read(16 * 1024)
835 data = stream.read(16 * 1024)
836 if not data:
836 if not data:
837 if fd: # fd means we used temporary file
837 if fd: # fd means we used temporary file
838 os.close(fd)
838 os.close(fd)
839 if not archive_cache_enabled:
839 if not archive_cache_enabled:
840 log.debug('Destroying temp archive %s', archive)
840 log.debug('Destroying temp archive %s', archive)
841 os.remove(archive)
841 os.remove(archive)
842 break
842 break
843 yield data
843 yield data
844
844
845 return get_chunked_archive(archive)
845 return get_chunked_archive(archive)
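The streaming generator returned above reads the archive in fixed-size chunks and cleans up the temporary file once the stream is exhausted. A simplified standalone version, assuming only the stdlib; the delete_when_done flag stands in for the controller's archive_cache_enabled check.

import os

def stream_archive(path, fd=None, delete_when_done=False, chunk_size=16 * 1024):
    with open(path, 'rb') as stream:
        while True:
            data = stream.read(chunk_size)
            if not data:
                break
            yield data
    # cleanup happens only after the consumer has read everything
    if fd is not None:  # fd is set when a temporary file was created
        os.close(fd)
    if delete_when_done:
        os.remove(path)

# hypothetical usage: response.app_iter = stream_archive(
#     archive, fd=fd, delete_when_done=not archive_cache_enabled)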
846
846
847 @LoginRequired()
847 @LoginRequired()
848 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
848 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
849 'repository.admin')
849 'repository.admin')
850 def diff(self, repo_name, f_path):
850 def diff(self, repo_name, f_path):
851
851
852 c.action = request.GET.get('diff')
852 c.action = request.GET.get('diff')
853 diff1 = request.GET.get('diff1', '')
853 diff1 = request.GET.get('diff1', '')
854 diff2 = request.GET.get('diff2', '')
854 diff2 = request.GET.get('diff2', '')
855
855
856 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
856 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
857
857
858 ignore_whitespace = str2bool(request.GET.get('ignorews'))
858 ignore_whitespace = str2bool(request.GET.get('ignorews'))
859 line_context = request.GET.get('context', 3)
859 line_context = request.GET.get('context', 3)
860
860
861 if not any((diff1, diff2)):
861 if not any((diff1, diff2)):
862 h.flash(
862 h.flash(
863 'Need query parameter "diff1" or "diff2" to generate a diff.',
863 'Need query parameter "diff1" or "diff2" to generate a diff.',
864 category='error')
864 category='error')
865 raise HTTPBadRequest()
865 raise HTTPBadRequest()
866
866
867 if c.action not in ['download', 'raw']:
867 if c.action not in ['download', 'raw']:
868 # redirect to new view if we render diff
868 # redirect to new view if we render diff
869 return redirect(
869 return redirect(
870 url('compare_url', repo_name=repo_name,
870 url('compare_url', repo_name=repo_name,
871 source_ref_type='rev',
871 source_ref_type='rev',
872 source_ref=diff1,
872 source_ref=diff1,
873 target_repo=c.repo_name,
873 target_repo=c.repo_name,
874 target_ref_type='rev',
874 target_ref_type='rev',
875 target_ref=diff2,
875 target_ref=diff2,
876 f_path=f_path))
876 f_path=f_path))
877
877
878 try:
878 try:
879 node1 = self._get_file_node(diff1, path1)
879 node1 = self._get_file_node(diff1, path1)
880 node2 = self._get_file_node(diff2, f_path)
880 node2 = self._get_file_node(diff2, f_path)
881 except (RepositoryError, NodeError):
881 except (RepositoryError, NodeError):
882 log.exception("Exception while trying to get node from repository")
882 log.exception("Exception while trying to get node from repository")
883 return redirect(url(
883 return redirect(url(
884 'files_home', repo_name=c.repo_name, f_path=f_path))
884 'files_home', repo_name=c.repo_name, f_path=f_path))
885
885
886 if all(isinstance(node.commit, EmptyCommit)
886 if all(isinstance(node.commit, EmptyCommit)
887 for node in (node1, node2)):
887 for node in (node1, node2)):
888 raise HTTPNotFound
888 raise HTTPNotFound
889
889
890 c.commit_1 = node1.commit
890 c.commit_1 = node1.commit
891 c.commit_2 = node2.commit
891 c.commit_2 = node2.commit
892
892
893 if c.action == 'download':
893 if c.action == 'download':
894 _diff = diffs.get_gitdiff(node1, node2,
894 _diff = diffs.get_gitdiff(node1, node2,
895 ignore_whitespace=ignore_whitespace,
895 ignore_whitespace=ignore_whitespace,
896 context=line_context)
896 context=line_context)
897 diff = diffs.DiffProcessor(_diff, format='gitdiff')
897 diff = diffs.DiffProcessor(_diff, format='gitdiff')
898
898
899 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
899 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
900 response.content_type = 'text/plain'
900 response.content_type = 'text/plain'
901 response.content_disposition = (
901 response.content_disposition = (
902 'attachment; filename=%s' % (diff_name,)
902 'attachment; filename=%s' % (diff_name,)
903 )
903 )
904 charset = self._get_default_encoding()
904 charset = self._get_default_encoding()
905 if charset:
905 if charset:
906 response.charset = charset
906 response.charset = charset
907 return diff.as_raw()
907 return diff.as_raw()
908
908
909 elif c.action == 'raw':
909 elif c.action == 'raw':
910 _diff = diffs.get_gitdiff(node1, node2,
910 _diff = diffs.get_gitdiff(node1, node2,
911 ignore_whitespace=ignore_whitespace,
911 ignore_whitespace=ignore_whitespace,
912 context=line_context)
912 context=line_context)
913 diff = diffs.DiffProcessor(_diff, format='gitdiff')
913 diff = diffs.DiffProcessor(_diff, format='gitdiff')
914 response.content_type = 'text/plain'
914 response.content_type = 'text/plain'
915 charset = self._get_default_encoding()
915 charset = self._get_default_encoding()
916 if charset:
916 if charset:
917 response.charset = charset
917 response.charset = charset
918 return diff.as_raw()
918 return diff.as_raw()
919
919
920 else:
920 else:
921 return redirect(
921 return redirect(
922 url('compare_url', repo_name=repo_name,
922 url('compare_url', repo_name=repo_name,
923 source_ref_type='rev',
923 source_ref_type='rev',
924 source_ref=diff1,
924 source_ref=diff1,
925 target_repo=c.repo_name,
925 target_repo=c.repo_name,
926 target_ref_type='rev',
926 target_ref_type='rev',
927 target_ref=diff2,
927 target_ref=diff2,
928 f_path=f_path))
928 f_path=f_path))
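The download and raw branches above serve a plain-text diff built by the RhodeCode diff machinery (diffs.get_gitdiff plus DiffProcessor). As a rough stand-in, the same "download a text diff of two file versions" idea can be sketched with the stdlib difflib module; this is not the controller's implementation, only an illustration.

import difflib

def plain_text_diff(old_text, new_text, old_rev, new_rev, f_path):
    # unified_diff expects lists of lines that keep their line endings
    lines = difflib.unified_diff(
        old_text.splitlines(True), new_text.splitlines(True),
        fromfile='%s@%s' % (f_path, old_rev),
        tofile='%s@%s' % (f_path, new_rev))
    return ''.join(lines)

# served with Content-Type: text/plain and, for downloads,
# Content-Disposition: attachment; filename=<rev1>_vs_<rev2>.diff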
929
929
930 @LoginRequired()
930 @LoginRequired()
931 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
931 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
932 'repository.admin')
932 'repository.admin')
933 def diff_2way(self, repo_name, f_path):
933 def diff_2way(self, repo_name, f_path):
934 """
934 """
935 Kept only to make OLD links work
935 Kept only to make OLD links work
936 """
936 """
937 diff1 = request.GET.get('diff1', '')
937 diff1 = request.GET.get('diff1', '')
938 diff2 = request.GET.get('diff2', '')
938 diff2 = request.GET.get('diff2', '')
939
939
940 if not any((diff1, diff2)):
940 if not any((diff1, diff2)):
941 h.flash(
941 h.flash(
942 'Need query parameter "diff1" or "diff2" to generate a diff.',
942 'Need query parameter "diff1" or "diff2" to generate a diff.',
943 category='error')
943 category='error')
944 raise HTTPBadRequest()
944 raise HTTPBadRequest()
945
945
946 return redirect(
946 return redirect(
947 url('compare_url', repo_name=repo_name,
947 url('compare_url', repo_name=repo_name,
948 source_ref_type='rev',
948 source_ref_type='rev',
949 source_ref=diff1,
949 source_ref=diff1,
950 target_repo=c.repo_name,
950 target_repo=c.repo_name,
951 target_ref_type='rev',
951 target_ref_type='rev',
952 target_ref=diff2,
952 target_ref=diff2,
953 f_path=f_path,
953 f_path=f_path,
954 diffmode='sideside'))
954 diffmode='sideside'))
955
955
956 def _get_file_node(self, commit_id, f_path):
956 def _get_file_node(self, commit_id, f_path):
957 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
957 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
958 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
958 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
959 try:
959 try:
960 node = commit.get_node(f_path)
960 node = commit.get_node(f_path)
961 if node.is_dir():
961 if node.is_dir():
962 raise NodeError('%s path is a %s, not a file'
962 raise NodeError('%s path is a %s, not a file'
963 % (node, type(node)))
963 % (node, type(node)))
964 except NodeDoesNotExistError:
964 except NodeDoesNotExistError:
965 commit = EmptyCommit(
965 commit = EmptyCommit(
966 commit_id=commit_id,
966 commit_id=commit_id,
967 idx=commit.idx,
967 idx=commit.idx,
968 repo=commit.repository,
968 repo=commit.repository,
969 alias=commit.repository.alias,
969 alias=commit.repository.alias,
970 message=commit.message,
970 message=commit.message,
971 author=commit.author,
971 author=commit.author,
972 date=commit.date)
972 date=commit.date)
973 node = FileNode(f_path, '', commit=commit)
973 node = FileNode(f_path, '', commit=commit)
974 else:
974 else:
975 commit = EmptyCommit(
975 commit = EmptyCommit(
976 repo=c.rhodecode_repo,
976 repo=c.rhodecode_repo,
977 alias=c.rhodecode_repo.alias)
977 alias=c.rhodecode_repo.alias)
978 node = FileNode(f_path, '', commit=commit)
978 node = FileNode(f_path, '', commit=commit)
979 return node
979 return node
980
980
981 def _get_node_history(self, commit, f_path, commits=None):
981 def _get_node_history(self, commit, f_path, commits=None):
982 """
982 """
983 get commit history for given node
983 get commit history for given node
984
984
985 :param commit: commit to calculate history
985 :param commit: commit to calculate history
986 :param f_path: path for node to calculate history for
986 :param f_path: path for node to calculate history for
987 :param commits: if passed don't calculate history and take
987 :param commits: if passed don't calculate history and take
988 commits defined in this list
988 commits defined in this list
989 """
989 """
990 # calculate history based on tip
990 # calculate history based on tip
991 tip = c.rhodecode_repo.get_commit()
991 tip = c.rhodecode_repo.get_commit()
992 if commits is None:
992 if commits is None:
993 pre_load = ["author", "branch"]
993 pre_load = ["author", "branch"]
994 try:
994 try:
995 commits = tip.get_file_history(f_path, pre_load=pre_load)
995 commits = tip.get_file_history(f_path, pre_load=pre_load)
996 except (NodeDoesNotExistError, CommitError):
996 except (NodeDoesNotExistError, CommitError):
997 # this node is not present at tip!
997 # this node is not present at tip!
998 commits = commit.get_file_history(f_path, pre_load=pre_load)
998 commits = commit.get_file_history(f_path, pre_load=pre_load)
999
999
1000 history = []
1000 history = []
1001 commits_group = ([], _("Changesets"))
1001 commits_group = ([], _("Changesets"))
1002 for commit in commits:
1002 for commit in commits:
1003 branch = ' (%s)' % commit.branch if commit.branch else ''
1003 branch = ' (%s)' % commit.branch if commit.branch else ''
1004 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
1004 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
1005 commits_group[0].append((commit.raw_id, n_desc,))
1005 commits_group[0].append((commit.raw_id, n_desc,))
1006 history.append(commits_group)
1006 history.append(commits_group)
1007
1007
1008 symbolic_reference = self._symbolic_reference
1008 symbolic_reference = self._symbolic_reference
1009
1009
1010 if c.rhodecode_repo.alias == 'svn':
1010 if c.rhodecode_repo.alias == 'svn':
1011 adjusted_f_path = self._adjust_file_path_for_svn(
1011 adjusted_f_path = self._adjust_file_path_for_svn(
1012 f_path, c.rhodecode_repo)
1012 f_path, c.rhodecode_repo)
1013 if adjusted_f_path != f_path:
1013 if adjusted_f_path != f_path:
1014 log.debug(
1014 log.debug(
1015 'Recognized svn tag or branch in file "%s", using svn '
1015 'Recognized svn tag or branch in file "%s", using svn '
1016 'specific symbolic references', f_path)
1016 'specific symbolic references', f_path)
1017 f_path = adjusted_f_path
1017 f_path = adjusted_f_path
1018 symbolic_reference = self._symbolic_reference_svn
1018 symbolic_reference = self._symbolic_reference_svn
1019
1019
1020 branches = self._create_references(
1020 branches = self._create_references(
1021 c.rhodecode_repo.branches, symbolic_reference, f_path)
1021 c.rhodecode_repo.branches, symbolic_reference, f_path)
1022 branches_group = (branches, _("Branches"))
1022 branches_group = (branches, _("Branches"))
1023
1023
1024 tags = self._create_references(
1024 tags = self._create_references(
1025 c.rhodecode_repo.tags, symbolic_reference, f_path)
1025 c.rhodecode_repo.tags, symbolic_reference, f_path)
1026 tags_group = (tags, _("Tags"))
1026 tags_group = (tags, _("Tags"))
1027
1027
1028 history.append(branches_group)
1028 history.append(branches_group)
1029 history.append(tags_group)
1029 history.append(tags_group)
1030
1030
1031 return history, commits
1031 return history, commits
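The history structure assembled above is a list of (options, label) groups that feeds the file-history selector. A plain-data sketch, assuming commits are given as (raw_id, idx, short_id, branch) tuples rather than commit objects:

def build_history_groups(commits, branches, tags):
    commit_options = []
    for raw_id, idx, short_id, branch in commits:
        suffix = ' (%s)' % branch if branch else ''
        commit_options.append((raw_id, 'r%s:%s%s' % (idx, short_id, suffix)))
    # branches/tags map a name to a commit id; options pair the reference
    # value with the display name, like _create_references() does
    branch_options = [(commit_id, name) for name, commit_id in branches.items()]
    tag_options = [(commit_id, name) for name, commit_id in tags.items()]
    return [
        (commit_options, 'Changesets'),
        (branch_options, 'Branches'),
        (tag_options, 'Tags'),
    ]

# build_history_groups([('abc123def', 12, 'abc123de', 'default')],
#                      {'default': 'abc123def'}, {})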
1032
1032
1033 def _adjust_file_path_for_svn(self, f_path, repo):
1033 def _adjust_file_path_for_svn(self, f_path, repo):
1034 """
1034 """
1035 Computes the relative path of `f_path`.
1035 Computes the relative path of `f_path`.
1036
1036
1037 This is mainly based on prefix matching of the recognized tags and
1037 This is mainly based on prefix matching of the recognized tags and
1038 branches in the underlying repository.
1038 branches in the underlying repository.
1039 """
1039 """
1040 tags_and_branches = itertools.chain(
1040 tags_and_branches = itertools.chain(
1041 repo.branches.iterkeys(),
1041 repo.branches.iterkeys(),
1042 repo.tags.iterkeys())
1042 repo.tags.iterkeys())
1043 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1043 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1044
1044
1045 for name in tags_and_branches:
1045 for name in tags_and_branches:
1046 if f_path.startswith(name + '/'):
1046 if f_path.startswith(name + '/'):
1047 f_path = vcspath.relpath(f_path, name)
1047 f_path = vcspath.relpath(f_path, name)
1048 break
1048 break
1049 return f_path
1049 return f_path
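A self-contained sketch of the prefix matching above, using posixpath in place of the vcs path helpers; branch and tag names are checked longest-first so that, for example, 'branches/stable' wins over a shorter prefix.

import itertools
import posixpath

def adjust_file_path_for_svn(f_path, branch_names, tag_names):
    tags_and_branches = sorted(
        itertools.chain(branch_names, tag_names), key=len, reverse=True)
    for name in tags_and_branches:
        if f_path.startswith(name + '/'):
            # drop the branch/tag prefix, keep the path inside it
            return posixpath.relpath(f_path, name)
    return f_path

# adjust_file_path_for_svn('branches/stable/setup.py',
#                          ['branches/stable', 'trunk'], ['tags/v1.0'])
# -> 'setup.py'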
1050
1050
1051 def _create_references(
1051 def _create_references(
1052 self, branches_or_tags, symbolic_reference, f_path):
1052 self, branches_or_tags, symbolic_reference, f_path):
1053 items = []
1053 items = []
1054 for name, commit_id in branches_or_tags.items():
1054 for name, commit_id in branches_or_tags.items():
1055 sym_ref = symbolic_reference(commit_id, name, f_path)
1055 sym_ref = symbolic_reference(commit_id, name, f_path)
1056 items.append((sym_ref, name))
1056 items.append((sym_ref, name))
1057 return items
1057 return items
1058
1058
1059 def _symbolic_reference(self, commit_id, name, f_path):
1059 def _symbolic_reference(self, commit_id, name, f_path):
1060 return commit_id
1060 return commit_id
1061
1061
1062 def _symbolic_reference_svn(self, commit_id, name, f_path):
1062 def _symbolic_reference_svn(self, commit_id, name, f_path):
1063 new_f_path = vcspath.join(name, f_path)
1063 new_f_path = vcspath.join(name, f_path)
1064 return u'%s@%s' % (new_f_path, commit_id)
1064 return u'%s@%s' % (new_f_path, commit_id)
1065
1065
1066 @LoginRequired()
1066 @LoginRequired()
1067 @XHRRequired()
1067 @XHRRequired()
1068 @HasRepoPermissionAnyDecorator(
1068 @HasRepoPermissionAnyDecorator(
1069 'repository.read', 'repository.write', 'repository.admin')
1069 'repository.read', 'repository.write', 'repository.admin')
1070 @jsonify
1070 @jsonify
1071 def nodelist(self, repo_name, revision, f_path):
1071 def nodelist(self, repo_name, revision, f_path):
1072 commit = self.__get_commit_or_redirect(revision, repo_name)
1072 commit = self.__get_commit_or_redirect(revision, repo_name)
1073
1073
1074 metadata = self._get_nodelist_at_commit(
1074 metadata = self._get_nodelist_at_commit(
1075 repo_name, commit.raw_id, f_path)
1075 repo_name, commit.raw_id, f_path)
1076 return {'nodes': metadata}
1076 return {'nodes': metadata}
1077
1077
1078 @LoginRequired()
1078 @LoginRequired()
1079 @XHRRequired()
1079 @XHRRequired()
1080 @HasRepoPermissionAnyDecorator(
1080 @HasRepoPermissionAnyDecorator(
1081 'repository.read', 'repository.write', 'repository.admin')
1081 'repository.read', 'repository.write', 'repository.admin')
1082 def nodetree_full(self, repo_name, commit_id, f_path):
1082 def nodetree_full(self, repo_name, commit_id, f_path):
1083 """
1083 """
1084 Returns rendered html of file tree that contains commit date,
1084 Returns rendered html of file tree that contains commit date,
1085 author, revision for the specified combination of
1085 author, revision for the specified combination of
1086 repo, commit_id and file path
1086 repo, commit_id and file path
1087
1087
1088 :param repo_name: name of the repository
1088 :param repo_name: name of the repository
1089 :param commit_id: commit_id of file tree
1089 :param commit_id: commit_id of file tree
1090 :param f_path: file path of the requested directory
1090 :param f_path: file path of the requested directory
1091 """
1091 """
1092
1092
1093 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1093 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1094 try:
1094 try:
1095 dir_node = commit.get_node(f_path)
1095 dir_node = commit.get_node(f_path)
1096 except RepositoryError as e:
1096 except RepositoryError as e:
1097 return 'error {}'.format(safe_str(e))
1097 return 'error {}'.format(safe_str(e))
1098
1098
1099 if dir_node.is_file():
1099 if dir_node.is_file():
1100 return ''
1100 return ''
1101
1101
1102 c.file = dir_node
1102 c.file = dir_node
1103 c.commit = commit
1103 c.commit = commit
1104
1104
1105 # using force=True here is a little trick: we flush the cache and
1105 # using force=True here is a little trick: we flush the cache and
1106 # recompute it under the same key as without full_load, so the fully
1106 # recompute it under the same key as without full_load, so the fully
1107 # loaded tree is now cached and returned instead of the partial one
1107 # loaded tree is now cached and returned instead of the partial one
1108 return self._get_tree_at_commit(
1108 return self._get_tree_at_commit(
1109 repo_name, commit.raw_id, dir_node.path, full_load=True,
1109 repo_name, commit.raw_id, dir_node.path, full_load=True,
1110 force=True)
1110 force=True)
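The force=True comment above describes refreshing a cached value under the same key so that the fully loaded tree replaces the previously cached partial one. A toy cache showing that pattern (names invented for this sketch):

_tree_cache = {}

def cached_tree(key, compute, force=False):
    # force=True recomputes and overwrites whatever is stored under `key`,
    # so later lookups with the same key get the richer result
    if force or key not in _tree_cache:
        _tree_cache[key] = compute()
    return _tree_cache[key]

# cached_tree(('repo', 'abc', '/'), lambda: 'partial tree')
# cached_tree(('repo', 'abc', '/'), lambda: 'full tree', force=True)
# cached_tree(('repo', 'abc', '/'), lambda: 'partial tree')  -> 'full tree'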
@@ -1,1018 +1,1018 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 pull requests controller for rhodecode; handles initializing pull requests
22 pull requests controller for rhodecode; handles initializing pull requests
23 """
23 """
24 import types
24 import types
25
25
26 import peppercorn
26 import peppercorn
27 import formencode
27 import formencode
28 import logging
28 import logging
29 import collections
29 import collections
30
30
31 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
31 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
32 from pylons import request, tmpl_context as c, url
32 from pylons import request, tmpl_context as c, url
33 from pylons.controllers.util import redirect
33 from pylons.controllers.util import redirect
34 from pylons.i18n.translation import _
34 from pylons.i18n.translation import _
35 from pyramid.threadlocal import get_current_registry
35 from pyramid.threadlocal import get_current_registry
36 from sqlalchemy.sql import func
36 from sqlalchemy.sql import func
37 from sqlalchemy.sql.expression import or_
37 from sqlalchemy.sql.expression import or_
38
38
39 from rhodecode import events
39 from rhodecode import events
40 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
40 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
41 from rhodecode.lib.ext_json import json
41 from rhodecode.lib.ext_json import json
42 from rhodecode.lib.base import (
42 from rhodecode.lib.base import (
43 BaseRepoController, render, vcs_operation_context)
43 BaseRepoController, render, vcs_operation_context)
44 from rhodecode.lib.auth import (
44 from rhodecode.lib.auth import (
45 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
45 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
46 HasAcceptedRepoType, XHRRequired)
46 HasAcceptedRepoType, XHRRequired)
47 from rhodecode.lib.channelstream import channelstream_request
47 from rhodecode.lib.channelstream import channelstream_request
48 from rhodecode.lib.utils import jsonify
48 from rhodecode.lib.utils import jsonify
49 from rhodecode.lib.utils2 import (
49 from rhodecode.lib.utils2 import (
50 safe_int, safe_str, str2bool, safe_unicode)
50 safe_int, safe_str, str2bool, safe_unicode)
51 from rhodecode.lib.vcs.backends.base import (
51 from rhodecode.lib.vcs.backends.base import (
52 EmptyCommit, UpdateFailureReason, EmptyRepository)
52 EmptyCommit, UpdateFailureReason, EmptyRepository)
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
54 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
55 NodeDoesNotExistError)
55 NodeDoesNotExistError)
56
56
57 from rhodecode.model.changeset_status import ChangesetStatusModel
57 from rhodecode.model.changeset_status import ChangesetStatusModel
58 from rhodecode.model.comment import CommentsModel
58 from rhodecode.model.comment import CommentsModel
59 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
59 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
60 Repository, PullRequestVersion)
60 Repository, PullRequestVersion)
61 from rhodecode.model.forms import PullRequestForm
61 from rhodecode.model.forms import PullRequestForm
62 from rhodecode.model.meta import Session
62 from rhodecode.model.meta import Session
63 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
63 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
64
64
65 log = logging.getLogger(__name__)
65 log = logging.getLogger(__name__)
66
66
67
67
68 class PullrequestsController(BaseRepoController):
68 class PullrequestsController(BaseRepoController):
69
69
70 def __before__(self):
70 def __before__(self):
71 super(PullrequestsController, self).__before__()
71 super(PullrequestsController, self).__before__()
72 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
72 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
73 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
73 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
74
74
75 @LoginRequired()
75 @LoginRequired()
76 @NotAnonymous()
76 @NotAnonymous()
77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
78 'repository.admin')
78 'repository.admin')
79 @HasAcceptedRepoType('git', 'hg')
79 @HasAcceptedRepoType('git', 'hg')
80 def index(self):
80 def index(self):
81 source_repo = c.rhodecode_db_repo
81 source_repo = c.rhodecode_db_repo
82
82
83 try:
83 try:
84 source_repo.scm_instance().get_commit()
84 source_repo.scm_instance().get_commit()
85 except EmptyRepositoryError:
85 except EmptyRepositoryError:
86 h.flash(h.literal(_('There are no commits yet')),
86 h.flash(h.literal(_('There are no commits yet')),
87 category='warning')
87 category='warning')
88 redirect(url('summary_home', repo_name=source_repo.repo_name))
88 redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name))
89
89
90 commit_id = request.GET.get('commit')
90 commit_id = request.GET.get('commit')
91 branch_ref = request.GET.get('branch')
91 branch_ref = request.GET.get('branch')
92 bookmark_ref = request.GET.get('bookmark')
92 bookmark_ref = request.GET.get('bookmark')
93
93
94 try:
94 try:
95 source_repo_data = PullRequestModel().generate_repo_data(
95 source_repo_data = PullRequestModel().generate_repo_data(
96 source_repo, commit_id=commit_id,
96 source_repo, commit_id=commit_id,
97 branch=branch_ref, bookmark=bookmark_ref)
97 branch=branch_ref, bookmark=bookmark_ref)
98 except CommitDoesNotExistError as e:
98 except CommitDoesNotExistError as e:
99 log.exception(e)
99 log.exception(e)
100 h.flash(_('Commit does not exist'), 'error')
100 h.flash(_('Commit does not exist'), 'error')
101 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
101 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
102
102
103 default_target_repo = source_repo
103 default_target_repo = source_repo
104
104
105 if source_repo.parent:
105 if source_repo.parent:
106 parent_vcs_obj = source_repo.parent.scm_instance()
106 parent_vcs_obj = source_repo.parent.scm_instance()
107 if parent_vcs_obj and not parent_vcs_obj.is_empty():
107 if parent_vcs_obj and not parent_vcs_obj.is_empty():
108 # change default if we have a parent repo
108 # change default if we have a parent repo
109 default_target_repo = source_repo.parent
109 default_target_repo = source_repo.parent
110
110
111 target_repo_data = PullRequestModel().generate_repo_data(
111 target_repo_data = PullRequestModel().generate_repo_data(
112 default_target_repo)
112 default_target_repo)
113
113
114 selected_source_ref = source_repo_data['refs']['selected_ref']
114 selected_source_ref = source_repo_data['refs']['selected_ref']
115
115
116 title_source_ref = selected_source_ref.split(':', 2)[1]
116 title_source_ref = selected_source_ref.split(':', 2)[1]
117 c.default_title = PullRequestModel().generate_pullrequest_title(
117 c.default_title = PullRequestModel().generate_pullrequest_title(
118 source=source_repo.repo_name,
118 source=source_repo.repo_name,
119 source_ref=title_source_ref,
119 source_ref=title_source_ref,
120 target=default_target_repo.repo_name
120 target=default_target_repo.repo_name
121 )
121 )
122
122
123 c.default_repo_data = {
123 c.default_repo_data = {
124 'source_repo_name': source_repo.repo_name,
124 'source_repo_name': source_repo.repo_name,
125 'source_refs_json': json.dumps(source_repo_data),
125 'source_refs_json': json.dumps(source_repo_data),
126 'target_repo_name': default_target_repo.repo_name,
126 'target_repo_name': default_target_repo.repo_name,
127 'target_refs_json': json.dumps(target_repo_data),
127 'target_refs_json': json.dumps(target_repo_data),
128 }
128 }
129 c.default_source_ref = selected_source_ref
129 c.default_source_ref = selected_source_ref
130
130
131 return render('/pullrequests/pullrequest.mako')
131 return render('/pullrequests/pullrequest.mako')
132
132
133 @LoginRequired()
133 @LoginRequired()
134 @NotAnonymous()
134 @NotAnonymous()
135 @XHRRequired()
135 @XHRRequired()
136 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
136 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
137 'repository.admin')
137 'repository.admin')
138 @jsonify
138 @jsonify
139 def get_repo_refs(self, repo_name, target_repo_name):
139 def get_repo_refs(self, repo_name, target_repo_name):
140 repo = Repository.get_by_repo_name(target_repo_name)
140 repo = Repository.get_by_repo_name(target_repo_name)
141 if not repo:
141 if not repo:
142 raise HTTPNotFound
142 raise HTTPNotFound
143 return PullRequestModel().generate_repo_data(repo)
143 return PullRequestModel().generate_repo_data(repo)
144
144
145 @LoginRequired()
145 @LoginRequired()
146 @NotAnonymous()
146 @NotAnonymous()
147 @XHRRequired()
147 @XHRRequired()
148 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
148 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
149 'repository.admin')
149 'repository.admin')
150 @jsonify
150 @jsonify
151 def get_repo_destinations(self, repo_name):
151 def get_repo_destinations(self, repo_name):
152 repo = Repository.get_by_repo_name(repo_name)
152 repo = Repository.get_by_repo_name(repo_name)
153 if not repo:
153 if not repo:
154 raise HTTPNotFound
154 raise HTTPNotFound
155 filter_query = request.GET.get('query')
155 filter_query = request.GET.get('query')
156
156
157 query = Repository.query() \
157 query = Repository.query() \
158 .order_by(func.length(Repository.repo_name)) \
158 .order_by(func.length(Repository.repo_name)) \
159 .filter(or_(
159 .filter(or_(
160 Repository.repo_name == repo.repo_name,
160 Repository.repo_name == repo.repo_name,
161 Repository.fork_id == repo.repo_id))
161 Repository.fork_id == repo.repo_id))
162
162
163 if filter_query:
163 if filter_query:
164 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
164 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
165 query = query.filter(
165 query = query.filter(
166 Repository.repo_name.ilike(ilike_expression))
166 Repository.repo_name.ilike(ilike_expression))
167
167
168 add_parent = False
168 add_parent = False
169 if repo.parent:
169 if repo.parent:
170 if filter_query in repo.parent.repo_name:
170 if filter_query in repo.parent.repo_name:
171 parent_vcs_obj = repo.parent.scm_instance()
171 parent_vcs_obj = repo.parent.scm_instance()
172 if parent_vcs_obj and not parent_vcs_obj.is_empty():
172 if parent_vcs_obj and not parent_vcs_obj.is_empty():
173 add_parent = True
173 add_parent = True
174
174
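# reserve one slot for the parent repo so the result list stays capped at 20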
175 limit = 20 - 1 if add_parent else 20
176 all_repos = query.limit(limit).all()
177 if add_parent:
178 all_repos += [repo.parent]
179
180 repos = []
181 for obj in self.scm_model.get_repos(all_repos):
182 repos.append({
183 'id': obj['name'],
184 'text': obj['name'],
185 'type': 'repo',
186 'obj': obj['dbrepo']
187 })
188
189 data = {
190 'more': False,
191 'results': [{
192 'text': _('Repositories'),
193 'children': repos
194 }] if repos else []
195 }
196 return data
197
198 @LoginRequired()
199 @NotAnonymous()
200 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
201 'repository.admin')
202 @HasAcceptedRepoType('git', 'hg')
203 @auth.CSRFRequired()
204 def create(self, repo_name):
205 repo = Repository.get_by_repo_name(repo_name)
206 if not repo:
207 raise HTTPNotFound
208
209 controls = peppercorn.parse(request.POST.items())
210
211 try:
212 _form = PullRequestForm(repo.repo_id)().to_python(controls)
213 except formencode.Invalid as errors:
214 if errors.error_dict.get('revisions'):
215 msg = 'Revisions: %s' % errors.error_dict['revisions']
216 elif errors.error_dict.get('pullrequest_title'):
217 msg = _('Pull request requires a title with min. 3 chars')
218 else:
219 msg = _('Error creating pull request: {}').format(errors)
220 log.exception(msg)
221 h.flash(msg, 'error')
222
223 # would rather just go back to form ...
224 return redirect(url('pullrequest_home', repo_name=repo_name))
225
226 source_repo = _form['source_repo']
227 source_ref = _form['source_ref']
228 target_repo = _form['target_repo']
229 target_ref = _form['target_ref']
230 commit_ids = _form['revisions'][::-1]
231
232 # find the ancestor for this pr
233 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
234 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
235
236 source_scm = source_db_repo.scm_instance()
237 target_scm = target_db_repo.scm_instance()
238
239 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
240 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
241
242 ancestor = source_scm.get_common_ancestor(
243 source_commit.raw_id, target_commit.raw_id, target_scm)
244
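# refs are 'type:name:commit_id' strings; the commit part of the target ref is
# replaced below with the common ancestor computed above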
245 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
246 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
247
248 pullrequest_title = _form['pullrequest_title']
249 title_source_ref = source_ref.split(':', 2)[1]
250 if not pullrequest_title:
251 pullrequest_title = PullRequestModel().generate_pullrequest_title(
252 source=source_repo,
253 source_ref=title_source_ref,
254 target=target_repo
255 )
256
257 description = _form['pullrequest_desc']
258
259 get_default_reviewers_data, validate_default_reviewers = \
260 PullRequestModel().get_reviewer_functions()
261
262 # recalculate the reviewer rules, to make sure we can validate what was submitted
263 reviewer_rules = get_default_reviewers_data(
264 c.rhodecode_user, source_db_repo, source_commit, target_db_repo,
265 target_commit)
266
267 reviewers = validate_default_reviewers(
268 _form['review_members'], reviewer_rules)
269
270 try:
271 pull_request = PullRequestModel().create(
272 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
273 target_ref, commit_ids, reviewers, pullrequest_title,
274 description, reviewer_rules
275 )
276 Session().commit()
277 h.flash(_('Successfully opened new pull request'),
278 category='success')
279 except Exception as e:
280 msg = _('Error occurred during creation of this pull request.')
281 log.exception(msg)
282 h.flash(msg, category='error')
283 return redirect(url('pullrequest_home', repo_name=repo_name))
284
285 return redirect(url('pullrequest_show', repo_name=target_repo,
286 pull_request_id=pull_request.pull_request_id))
287
288 @LoginRequired()
289 @NotAnonymous()
290 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
291 'repository.admin')
292 @auth.CSRFRequired()
293 @jsonify
294 def update(self, repo_name, pull_request_id):
295 pull_request_id = safe_int(pull_request_id)
296 pull_request = PullRequest.get_or_404(pull_request_id)
297 # only owner or admin can update it
298 allowed_to_update = PullRequestModel().check_user_update(
299 pull_request, c.rhodecode_user)
300 if allowed_to_update:
301 controls = peppercorn.parse(request.POST.items())
302
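# a single update endpoint: dispatch on POST flags to reviewer updates,
# commit updates, closing, or editing of the pull request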
303 if 'review_members' in controls:
304 self._update_reviewers(
305 pull_request_id, controls['review_members'],
306 pull_request.reviewer_data)
307 elif str2bool(request.POST.get('update_commits', 'false')):
308 self._update_commits(pull_request)
309 elif str2bool(request.POST.get('close_pull_request', 'false')):
310 self._reject_close(pull_request)
311 elif str2bool(request.POST.get('edit_pull_request', 'false')):
312 self._edit_pull_request(pull_request)
313 else:
314 raise HTTPBadRequest()
315 return True
316 raise HTTPForbidden()
317
318 def _edit_pull_request(self, pull_request):
319 try:
320 PullRequestModel().edit(
321 pull_request, request.POST.get('title'),
322 request.POST.get('description'))
323 except ValueError:
324 msg = _(u'Cannot update closed pull requests.')
325 h.flash(msg, category='error')
326 return
327 else:
328 Session().commit()
329
330 msg = _(u'Pull request title & description updated.')
331 h.flash(msg, category='success')
332 return
333
334 def _update_commits(self, pull_request):
335 resp = PullRequestModel().update_commits(pull_request)
336
337 if resp.executed:
338
339 if resp.target_changed and resp.source_changed:
340 changed = 'target and source repositories'
341 elif resp.target_changed and not resp.source_changed:
342 changed = 'target repository'
343 elif not resp.target_changed and resp.source_changed:
344 changed = 'source repository'
345 else:
346 changed = 'nothing'
347
348 msg = _(
349 u'Pull request updated to "{source_commit_id}" with '
350 u'{count_added} added, {count_removed} removed commits. '
351 u'Source of changes: {change_source}')
352 msg = msg.format(
353 source_commit_id=pull_request.source_ref_parts.commit_id,
354 count_added=len(resp.changes.added),
355 count_removed=len(resp.changes.removed),
356 change_source=changed)
357 h.flash(msg, category='success')
358
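# if the channelstream plugin is enabled, push a live notification with a
# 'Reload page' link to other users currently viewing this pull request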
359 registry = get_current_registry()
360 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
361 channelstream_config = rhodecode_plugins.get('channelstream', {})
362 if channelstream_config.get('enabled'):
363 message = msg + (
364 ' - <a onclick="window.location.reload()">'
365 '<strong>{}</strong></a>'.format(_('Reload page')))
366 channel = '/repo${}$/pr/{}'.format(
367 pull_request.target_repo.repo_name,
368 pull_request.pull_request_id
369 )
370 payload = {
371 'type': 'message',
372 'user': 'system',
373 'exclude_users': [request.user.username],
374 'channel': channel,
375 'message': {
376 'message': message,
377 'level': 'success',
378 'topic': '/notifications'
379 }
380 }
381 channelstream_request(
382 channelstream_config, [payload], '/message',
383 raise_exc=False)
384 else:
385 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
386 warning_reasons = [
387 UpdateFailureReason.NO_CHANGE,
388 UpdateFailureReason.WRONG_REF_TYPE,
389 ]
390 category = 'warning' if resp.reason in warning_reasons else 'error'
391 h.flash(msg, category=category)
392
393 @auth.CSRFRequired()
394 @LoginRequired()
395 @NotAnonymous()
396 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
397 'repository.admin')
398 def merge(self, repo_name, pull_request_id):
399 """
400 POST /{repo_name}/pull-request/{pull_request_id}
401
402 Merge will perform a server-side merge of the specified
403 pull request, if the pull request is approved and mergeable.
404 After successful merging, the pull request is automatically
405 closed, with a relevant comment.
406 """
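# Illustrative client call (values are hypothetical): an authenticated user
# would POST to e.g. /myrepo/pull-request/42 with a valid 'csrf_token' form
# field, as required by the @auth.CSRFRequired() decorator above.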
407 pull_request_id = safe_int(pull_request_id)
408 pull_request = PullRequest.get_or_404(pull_request_id)
409 user = c.rhodecode_user
410
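# MergeCheck.validate() aggregates the pre-merge validations; any collected
# error is flashed to the user and blocks the server-side merge below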
411 check = MergeCheck.validate(pull_request, user)
412 merge_possible = not check.failed
413
414 for err_type, error_msg in check.errors:
415 h.flash(error_msg, category=err_type)
416
417 if merge_possible:
418 log.debug("Pre-conditions checked, trying to merge.")
419 extras = vcs_operation_context(
420 request.environ, repo_name=pull_request.target_repo.repo_name,
421 username=user.username, action='push',
422 scm=pull_request.target_repo.repo_type)
423 self._merge_pull_request(pull_request, user, extras)
424
425 return redirect(url(
426 'pullrequest_show',
427 repo_name=pull_request.target_repo.repo_name,
428 pull_request_id=pull_request.pull_request_id))
429
430 def _merge_pull_request(self, pull_request, user, extras):
431 merge_resp = PullRequestModel().merge(
432 pull_request, user, extras=extras)
433
434 if merge_resp.executed:
435 log.debug("The merge was successful, closing the pull request.")
436 PullRequestModel().close_pull_request(
437 pull_request.pull_request_id, user)
438 Session().commit()
439 msg = _('Pull request was successfully merged and closed.')
440 h.flash(msg, category='success')
441 else:
442 log.debug(
443 "The merge was not successful. Merge response: %s",
444 merge_resp)
445 msg = PullRequestModel().merge_status_message(
446 merge_resp.failure_reason)
447 h.flash(msg, category='error')
448
449 def _update_reviewers(self, pull_request_id, review_members, reviewer_rules):
450
451 get_default_reviewers_data, validate_default_reviewers = \
452 PullRequestModel().get_reviewer_functions()
453
454 try:
455 reviewers = validate_default_reviewers(review_members, reviewer_rules)
456 except ValueError as e:
457 log.error('Reviewers Validation: {}'.format(e))
458 h.flash(e, category='error')
459 return
460
461 PullRequestModel().update_reviewers(pull_request_id, reviewers)
462 h.flash(_('Pull request reviewers updated.'), category='success')
463 Session().commit()
464
465 def _reject_close(self, pull_request):
466 if pull_request.is_closed():
467 raise HTTPForbidden()
468
469 PullRequestModel().close_pull_request_with_comment(
470 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
471 Session().commit()
472
473 @LoginRequired()
474 @NotAnonymous()
475 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
476 'repository.admin')
477 @auth.CSRFRequired()
478 @jsonify
479 def delete(self, repo_name, pull_request_id):
480 pull_request_id = safe_int(pull_request_id)
481 pull_request = PullRequest.get_or_404(pull_request_id)
482
483 pr_closed = pull_request.is_closed()
484 allowed_to_delete = PullRequestModel().check_user_delete(
485 pull_request, c.rhodecode_user) and not pr_closed
486
487 # only the owner can delete it!
488 if allowed_to_delete:
489 PullRequestModel().delete(pull_request)
490 Session().commit()
491 h.flash(_('Successfully deleted pull request'),
492 category='success')
493 return redirect(url('my_account_pullrequests'))
494
495 h.flash(_('You are not allowed to delete this pull request'),
496 category='error')
497 raise HTTPForbidden()
498
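# resolves either the latest pull request, a specific PullRequestVersion, or
# the plain pull request, plus a display object used by the templates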
499 def _get_pr_version(self, pull_request_id, version=None):
500 pull_request_id = safe_int(pull_request_id)
501 at_version = None
502
503 if version and version == 'latest':
504 pull_request_ver = PullRequest.get(pull_request_id)
505 pull_request_obj = pull_request_ver
506 _org_pull_request_obj = pull_request_obj
507 at_version = 'latest'
508 elif version:
509 pull_request_ver = PullRequestVersion.get_or_404(version)
510 pull_request_obj = pull_request_ver
511 _org_pull_request_obj = pull_request_ver.pull_request
512 at_version = pull_request_ver.pull_request_version_id
513 else:
514 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
515 pull_request_id)
516
517 pull_request_display_obj = PullRequest.get_pr_display_object(
518 pull_request_obj, _org_pull_request_obj)
519
520 return _org_pull_request_obj, pull_request_obj, \
521 pull_request_display_obj, at_version
522
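# builds the rendered diffset between the given source and target refs and
# attaches the supplied inline comments to the matching files and lines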
523 def _get_diffset(
524 self, source_repo, source_ref_id, target_ref_id, target_commit,
525 source_commit, diff_limit, file_limit, display_inline_comments):
526 vcs_diff = PullRequestModel().get_diff(
527 source_repo, source_ref_id, target_ref_id)
528
529 diff_processor = diffs.DiffProcessor(
530 vcs_diff, format='newdiff', diff_limit=diff_limit,
531 file_limit=file_limit, show_full_diff=c.fulldiff)
532
533 _parsed = diff_processor.prepare()
534
535 def _node_getter(commit):
536 def get_node(fname):
537 try:
538 return commit.get_node(fname)
539 except NodeDoesNotExistError:
540 return None
541
542 return get_node
543
544 diffset = codeblocks.DiffSet(
545 repo_name=c.repo_name,
546 source_repo_name=c.source_repo.repo_name,
547 source_node_getter=_node_getter(target_commit),
548 target_node_getter=_node_getter(source_commit),
549 comments=display_inline_comments
550 )
551 diffset = diffset.render_patchset(
552 _parsed, target_commit.raw_id, source_commit.raw_id)
553
554 return diffset
555
556 @LoginRequired()
557 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
558 'repository.admin')
559 def show(self, repo_name, pull_request_id):
560 pull_request_id = safe_int(pull_request_id)
561 version = request.GET.get('version')
562 from_version = request.GET.get('from_version') or version
563 merge_checks = request.GET.get('merge_checks')
564 c.fulldiff = str2bool(request.GET.get('fulldiff'))
565
566 (pull_request_latest,
567 pull_request_at_ver,
568 pull_request_display_obj,
569 at_version) = self._get_pr_version(
570 pull_request_id, version=version)
571 pr_closed = pull_request_latest.is_closed()
572
573 if pr_closed and (version or from_version):
574 # do not allow browsing versions of a closed pull request
575 return redirect(h.url('pullrequest_show', repo_name=repo_name,
576 pull_request_id=pull_request_id))
577
578 versions = pull_request_display_obj.versions()
579
580 c.at_version = at_version
581 c.at_version_num = (at_version
582 if at_version and at_version != 'latest'
583 else None)
584 c.at_version_pos = ChangesetComment.get_index_from_version(
585 c.at_version_num, versions)
586
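# resolve the 'from' version the same way, so the view can show a diff
# between two versions of this pull request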
587 (prev_pull_request_latest,
588 prev_pull_request_at_ver,
589 prev_pull_request_display_obj,
590 prev_at_version) = self._get_pr_version(
591 pull_request_id, version=from_version)
592
593 c.from_version = prev_at_version
594 c.from_version_num = (prev_at_version
595 if prev_at_version and prev_at_version != 'latest'
596 else None)
597 c.from_version_pos = ChangesetComment.get_index_from_version(
598 c.from_version_num, versions)
599
600 # define if we're in COMPARE mode or VIEW at version mode
601 compare = at_version != prev_at_version
602
603 # the repo_name this pull request was opened against,
604 # i.e. the target_repo, must match
605 if repo_name != pull_request_at_ver.target_repo.repo_name:
606 raise HTTPNotFound
607
608 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
609 pull_request_at_ver)
610
611 c.pull_request = pull_request_display_obj
612 c.pull_request_latest = pull_request_latest
613
614 if compare or (at_version and not at_version == 'latest'):
615 c.allowed_to_change_status = False
616 c.allowed_to_update = False
617 c.allowed_to_merge = False
618 c.allowed_to_delete = False
619 c.allowed_to_comment = False
620 c.allowed_to_close = False
621 else:
622 can_change_status = PullRequestModel().check_user_change_status(
623 pull_request_at_ver, c.rhodecode_user)
624 c.allowed_to_change_status = can_change_status and not pr_closed
625
626 c.allowed_to_update = PullRequestModel().check_user_update(
627 pull_request_latest, c.rhodecode_user) and not pr_closed
628 c.allowed_to_merge = PullRequestModel().check_user_merge(
629 pull_request_latest, c.rhodecode_user) and not pr_closed
630 c.allowed_to_delete = PullRequestModel().check_user_delete(
631 pull_request_latest, c.rhodecode_user) and not pr_closed
632 c.allowed_to_comment = not pr_closed
633 c.allowed_to_close = c.allowed_to_merge and not pr_closed
634
635 c.forbid_adding_reviewers = False
636 c.forbid_author_to_review = False
637
638 if pull_request_latest.reviewer_data and \
639 'rules' in pull_request_latest.reviewer_data:
640 rules = pull_request_latest.reviewer_data['rules'] or {}
641 try:
642 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
643 c.forbid_author_to_review = rules.get('forbid_author_to_review')
644 except Exception:
645 pass
646
647 # check merge capabilities
648 _merge_check = MergeCheck.validate(
649 pull_request_latest, user=c.rhodecode_user)
650 c.pr_merge_errors = _merge_check.error_details
651 c.pr_merge_possible = not _merge_check.failed
652 c.pr_merge_message = _merge_check.merge_msg
653
654 c.pull_request_review_status = _merge_check.review_status
655 if merge_checks:
656 return render('/pullrequests/pullrequest_merge_checks.mako')
657
658 comments_model = CommentsModel()
659
660 # reviewers and statuses
661 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
662 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
663
664 # GENERAL COMMENTS with versions #
665 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
666 q = q.order_by(ChangesetComment.comment_id.asc())
667 general_comments = q
668
669 # pick comments we want to render at current version
670 c.comment_versions = comments_model.aggregate_comments(
671 general_comments, versions, c.at_version_num)
672 c.comments = c.comment_versions[c.at_version_num]['until']
673
674 # INLINE COMMENTS with versions #
675 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
676 q = q.order_by(ChangesetComment.comment_id.asc())
677 inline_comments = q
678
679 c.inline_versions = comments_model.aggregate_comments(
680 inline_comments, versions, c.at_version_num, inline=True)
681
682 # inject latest version
683 latest_ver = PullRequest.get_pr_display_object(
684 pull_request_latest, pull_request_latest)
685
686 c.versions = versions + [latest_ver]
687
688 # if we view a specific version, do not show comments that were
689 # made after that version
690 display_inline_comments = collections.defaultdict(
691 lambda: collections.defaultdict(list))
692 for co in inline_comments:
693 if c.at_version_num:
694 # pick comments that are at most UP TO the given version, so we
695 # don't render comments for a higher version
696 should_render = co.pull_request_version_id and \
697 co.pull_request_version_id <= c.at_version_num
698 else:
699 # showing all, for 'latest'
700 should_render = True
701
702 if should_render:
703 display_inline_comments[co.f_path][co.line_no].append(co)
704
705 # load diff data into template context, if we use compare mode then
706 # diff is calculated based on changes between versions of PR
707
708 source_repo = pull_request_at_ver.source_repo
709 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
710
711 target_repo = pull_request_at_ver.target_repo
712 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
713
714 if compare:
715 # in compare switch the diff base to latest commit from prev version
716 target_ref_id = prev_pull_request_display_obj.revisions[0]
717
718 # despite opening commits for bookmarks/branches/tags, we always
719 # convert this to rev to prevent changes after bookmark or branch change
720 c.source_ref_type = 'rev'
721 c.source_ref = source_ref_id
722
723 c.target_ref_type = 'rev'
724 c.target_ref = target_ref_id
725
726 c.source_repo = source_repo
727 c.target_repo = target_repo
728
729 # diff_limit is the old behavior: it cuts off the whole diff once the
730 # limit is reached, otherwise it only hides the big files from the
731 # front-end
732 diff_limit = self.cut_off_limit_diff
733 file_limit = self.cut_off_limit_file
734
735 c.commit_ranges = []
736 source_commit = EmptyCommit()
737 target_commit = EmptyCommit()
738 c.missing_requirements = False
739
740 source_scm = source_repo.scm_instance()
741 target_scm = target_repo.scm_instance()
742
743 # try the shadow repo first, fall back to the regular repo
744 try:
745 commits_source_repo = pull_request_latest.get_shadow_repo()
746 except Exception:
747 log.debug('Failed to get shadow repo', exc_info=True)
748 commits_source_repo = source_scm
749
750 c.commits_source_repo = commits_source_repo
751 commit_cache = {}
752 try:
753 pre_load = ["author", "branch", "date", "message"]
754 show_revs = pull_request_at_ver.revisions
755 for rev in show_revs:
756 comm = commits_source_repo.get_commit(
757 commit_id=rev, pre_load=pre_load)
758 c.commit_ranges.append(comm)
759 commit_cache[comm.raw_id] = comm
760
761 # Order here matters, we first need to get target, and then
762 # the source
763 target_commit = commits_source_repo.get_commit(
764 commit_id=safe_str(target_ref_id))
765
766 source_commit = commits_source_repo.get_commit(
767 commit_id=safe_str(source_ref_id))
768
769 except CommitDoesNotExistError:
770 log.warning(
771 'Failed to get commit from `{}` repo'.format(
772 commits_source_repo), exc_info=True)
773 except RepositoryRequirementError:
774 log.warning(
775 'Failed to get all required data from repo', exc_info=True)
776 c.missing_requirements = True
777
778 c.ancestor = None # set it to None, to hide it from PR view
779
780 try:
781 ancestor_id = source_scm.get_common_ancestor(
782 source_commit.raw_id, target_commit.raw_id, target_scm)
783 c.ancestor_commit = source_scm.get_commit(ancestor_id)
784 except Exception:
785 c.ancestor_commit = None
786
787 c.statuses = source_repo.statuses(
788 [x.raw_id for x in c.commit_ranges])
789
790 # auto collapse if we have more than limit
791 collapse_limit = diffs.DiffProcessor._collapse_commits_over
792 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
793 c.compare_mode = compare
794
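# skip diff rendering when required commits could not be loaded or when
# source and target point at the same commit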
795 c.missing_commits = False
796 if (c.missing_requirements or isinstance(source_commit, EmptyCommit)
797 or source_commit == target_commit):
798
799 c.missing_commits = True
800 else:
801
802 c.diffset = self._get_diffset(
803 commits_source_repo, source_ref_id, target_ref_id,
804 target_commit, source_commit,
805 diff_limit, file_limit, display_inline_comments)
806
807 c.limited_diff = c.diffset.limited_diff
808
809 # calculate removed files that are bound to comments
810 comment_deleted_files = [
811 fname for fname in display_inline_comments
812 if fname not in c.diffset.file_stats]
813
814 c.deleted_files_comments = collections.defaultdict(dict)
815 for fname, per_line_comments in display_inline_comments.items():
816 if fname in comment_deleted_files:
817 c.deleted_files_comments[fname]['stats'] = 0
818 c.deleted_files_comments[fname]['comments'] = list()
819 for lno, comments in per_line_comments.items():
820 c.deleted_files_comments[fname]['comments'].extend(
821 comments)
822
823 # this is a hack to properly display links; when creating a PR, the
824 # compare view and others use a different notation, and
825 # compare_commits.mako renders links based on the target_repo.
826 # We need to swap that here to generate it properly on the html side
827 c.target_repo = c.source_repo
828
829 c.commit_statuses = ChangesetStatus.STATUSES
830
831 c.show_version_changes = not pr_closed
832 if c.show_version_changes:
833 cur_obj = pull_request_at_ver
834 prev_obj = prev_pull_request_at_ver
835
836 old_commit_ids = prev_obj.revisions
837 new_commit_ids = cur_obj.revisions
838 commit_changes = PullRequestModel()._calculate_commit_id_changes(
839 old_commit_ids, new_commit_ids)
840 c.commit_changes_summary = commit_changes
841
842 # calculate the diff for commits between versions
843 c.commit_changes = []
844 mark = lambda cs, fw: list(
845 h.itertools.izip_longest([], cs, fillvalue=fw))
846 for c_type, raw_id in mark(commit_changes.added, 'a') \
847 + mark(commit_changes.removed, 'r') \
848 + mark(commit_changes.common, 'c'):
849
850 if raw_id in commit_cache:
851 commit = commit_cache[raw_id]
852 else:
853 try:
854 commit = commits_source_repo.get_commit(raw_id)
855 except CommitDoesNotExistError:
856 # if extraction fails, still use a "dummy" commit
857 # for display in the commit diff
858 commit = h.AttributeDict(
859 {'raw_id': raw_id,
860 'message': 'EMPTY or MISSING COMMIT'})
861 c.commit_changes.append([c_type, commit])
862
863 # current user review statuses for each version
864 c.review_versions = {}
865 if c.rhodecode_user.user_id in allowed_reviewers:
866 for co in general_comments:
867 if co.author.user_id == c.rhodecode_user.user_id:
868 # a comment may carry a status change
869 status = co.status_change
870 if status:
871 _ver_pr = status[0].comment.pull_request_version_id
872 c.review_versions[_ver_pr] = status[0]
873
874 return render('/pullrequests/pullrequest_show.mako')
875
876 @LoginRequired()
877 @NotAnonymous()
878 @HasRepoPermissionAnyDecorator(
879 'repository.read', 'repository.write', 'repository.admin')
880 @auth.CSRFRequired()
881 @jsonify
882 def comment(self, repo_name, pull_request_id):
883 pull_request_id = safe_int(pull_request_id)
884 pull_request = PullRequest.get_or_404(pull_request_id)
885 if pull_request.is_closed():
886 raise HTTPForbidden()
887
888 status = request.POST.get('changeset_status', None)
889 text = request.POST.get('text')
890 comment_type = request.POST.get('comment_type')
891 resolves_comment_id = request.POST.get('resolves_comment_id', None)
892 close_pull_request = request.POST.get('close_pull_request')
893
894 close_pr = False
895 # only the owner, an admin, or a user with write permissions
896 allowed_to_close = PullRequestModel().check_user_update(
897 pull_request, c.rhodecode_user)
898
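# closing via a comment forces a final status: approved only when the
# calculated review status is approved, otherwise rejected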
899 if close_pull_request and allowed_to_close:
900 close_pr = True
901 pull_request_review_status = pull_request.calculated_review_status()
902 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
903 # approved only if we have voting consent
904 status = ChangesetStatus.STATUS_APPROVED
905 else:
906 status = ChangesetStatus.STATUS_REJECTED
907
908 allowed_to_change_status = PullRequestModel().check_user_change_status(
909 pull_request, c.rhodecode_user)
910
911 if status and allowed_to_change_status:
912 message = (_('Status change %(transition_icon)s %(status)s')
913 % {'transition_icon': '>',
914 'status': ChangesetStatus.get_status_lbl(status)})
915 if close_pr:
916 message = _('Closing with') + ' ' + message
917 text = text or message
918 comm = CommentsModel().create(
919 text=text,
920 repo=c.rhodecode_db_repo.repo_id,
921 user=c.rhodecode_user.user_id,
922 pull_request=pull_request_id,
923 f_path=request.POST.get('f_path'),
924 line_no=request.POST.get('line'),
925 status_change=(ChangesetStatus.get_status_lbl(status)
926 if status and allowed_to_change_status else None),
927 status_change_type=(status
928 if status and allowed_to_change_status else None),
929 closing_pr=close_pr,
930 comment_type=comment_type,
931 resolves_comment_id=resolves_comment_id
932 )
933
934 if allowed_to_change_status:
935 old_calculated_status = pull_request.calculated_review_status()
936 # apply the status change, if one was set
937 if status:
938 ChangesetStatusModel().set_status(
939 c.rhodecode_db_repo.repo_id,
940 status,
941 c.rhodecode_user.user_id,
942 comm,
943 pull_request=pull_request_id
944 )
945
946 Session().flush()
947 events.trigger(events.PullRequestCommentEvent(pull_request, comm))
948 # we now calculate the status of the pull request, and based on that
949 # calculation we set the commit statuses
950 calculated_status = pull_request.calculated_review_status()
951 if old_calculated_status != calculated_status:
952 PullRequestModel()._trigger_pull_request_hook(
953 pull_request, c.rhodecode_user, 'review_status_change')
954
955 calculated_status_lbl = ChangesetStatus.get_status_lbl(
956 calculated_status)
957
958 if close_pr:
959 status_completed = (
960 calculated_status in [ChangesetStatus.STATUS_APPROVED,
961 ChangesetStatus.STATUS_REJECTED])
962 if close_pull_request or status_completed:
963 PullRequestModel().close_pull_request(
964 pull_request_id, c.rhodecode_user)
965 else:
966 h.flash(_('Closing a pull request with a status other than '
967 'rejected or approved is forbidden. '
968 'Calculated status from all reviewers '
969 'is currently: %s') % calculated_status_lbl,
970 category='warning')
971
972 Session().commit()
973
974 if not request.is_xhr:
975 return redirect(h.url('pullrequest_show', repo_name=repo_name,
976 pull_request_id=pull_request_id))
977
978 data = {
979 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
980 }
981 if comm:
982 c.co = comm
983 c.inline_comment = True if comm.line_no else False
984 data.update(comm.get_dict())
985 data.update({'rendered_text':
986 render('changeset/changeset_comment_block.mako')})
987
988 return data
989
990 @LoginRequired()
991 @NotAnonymous()
992 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
993 'repository.admin')
994 @auth.CSRFRequired()
995 @jsonify
996 def delete_comment(self, repo_name, comment_id):
997 return self._delete_comment(comment_id)
998
999 def _delete_comment(self, comment_id):
1000 comment_id = safe_int(comment_id)
1001 co = ChangesetComment.get_or_404(comment_id)
1002 if co.pull_request.is_closed():
1003 # don't allow deleting comments on closed pull requests
1004 raise HTTPForbidden()
1005
1006 is_owner = co.author.user_id == c.rhodecode_user.user_id
1007 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
1007 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
1008 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
1008 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
1009 old_calculated_status = co.pull_request.calculated_review_status()
1009 old_calculated_status = co.pull_request.calculated_review_status()
1010 CommentsModel().delete(comment=co)
1010 CommentsModel().delete(comment=co)
1011 Session().commit()
1011 Session().commit()
1012 calculated_status = co.pull_request.calculated_review_status()
1012 calculated_status = co.pull_request.calculated_review_status()
1013 if old_calculated_status != calculated_status:
1013 if old_calculated_status != calculated_status:
1014 PullRequestModel()._trigger_pull_request_hook(
1014 PullRequestModel()._trigger_pull_request_hook(
1015 co.pull_request, c.rhodecode_user, 'review_status_change')
1015 co.pull_request, c.rhodecode_user, 'review_status_change')
1016 return True
1016 return True
1017 else:
1017 else:
1018 raise HTTPForbidden()
1018 raise HTTPForbidden()
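For orientation, a minimal sketch of the close-on-status rule the handler above implements: a close request only goes through when the aggregated review status is approved or rejected. The function name and the plain string statuses are illustrative stand-ins for the ChangesetStatus constants, and the separate forced-close flag visible in the diff is ignored here.

    def can_close_pull_request(calculated_status):
        # Mirrors the `status_completed` check above: anything other than a
        # settled review (approved/rejected) is refused with a warning flash.
        return calculated_status in ('approved', 'rejected')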
@@ -1,314 +1,315 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22
23 from pylons import url
24 from pylons.i18n.translation import _
25 from webhelpers.html.builder import literal
26 from webhelpers.html.tags import link_to
27
28 from rhodecode.lib.utils2 import AttributeDict
29 from rhodecode.lib.vcs.backends.base import BaseCommit
30 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
31
32
33 log = logging.getLogger(__name__)
34
35
36 def action_parser(user_log, feed=False, parse_cs=False):
37 """
38 This helper will action_map the specified string action into translated
39 fancy names with icons and links
40
41 :param user_log: user log instance
42 :param feed: use output for feeds (no html and fancy icons)
43 :param parse_cs: parse Changesets into VCS instances
44 """
45 ap = ActionParser(user_log, feed=False, parse_commits=False)
46 return ap.callbacks()
47
48
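A hedged usage sketch of the three zero-argument callbacks returned by action_parser(); the journal template that actually consumes them is not part of this diff, and `user_log` is assumed to be a UserLog row.

    # Illustrative only: label, params renderer, and icon renderer.
    action_label, action_params, action_icon = action_parser(user_log)
    html_fragment = action_label() + action_params() + action_icon()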
49 class ActionParser(object):
50
51 commits_limit = 3 # display this amount always
52 commits_top_limit = 50 # show up to this amount of commits hidden
53
54 def __init__(self, user_log, feed=False, parse_commits=False):
55 self.user_log = user_log
56 self.feed = feed
57 self.parse_commits = parse_commits
58
59 self.action = user_log.action
60 self.action_params = ' '
61 x = self.action.split(':', 1)
62 if len(x) > 1:
63 self.action, self.action_params = x
64
65 def callbacks(self):
66 action_str = self.action_map.get(self.action, self.action)
67 if self.feed:
68 action = action_str[0].replace('[', '').replace(']', '')
69 else:
70 action = action_str[0]\
71 .replace('[', '<span class="journal_highlight">')\
72 .replace(']', '</span>')
73
74 action_params_func = _no_params_func
75 if callable(action_str[1]):
76 action_params_func = action_str[1]
77
78 # returned callbacks we need to call to get
79 return [
80 lambda: literal(action), action_params_func,
81 self.action_parser_icon]
82
83 @property
84 def action_map(self):
85
86 # action : translated str, callback(extractor), icon
87 action_map = {
88 'user_deleted_repo': (
89 _('[deleted] repository'),
90 None, 'icon-trash'),
91 'user_created_repo': (
92 _('[created] repository'),
93 None, 'icon-plus icon-plus-colored'),
94 'user_created_fork': (
95 _('[created] repository as fork'),
96 None, 'icon-code-fork'),
97 'user_forked_repo': (
98 _('[forked] repository'),
99 self.get_fork_name, 'icon-code-fork'),
100 'user_updated_repo': (
101 _('[updated] repository'),
102 None, 'icon-pencil icon-pencil-colored'),
103 'user_downloaded_archive': (
104 _('[downloaded] archive from repository'),
105 self.get_archive_name, 'icon-download-alt'),
106 'admin_deleted_repo': (
107 _('[delete] repository'),
108 None, 'icon-trash'),
109 'admin_created_repo': (
110 _('[created] repository'),
111 None, 'icon-plus icon-plus-colored'),
112 'admin_forked_repo': (
113 _('[forked] repository'),
114 None, 'icon-code-fork icon-fork-colored'),
115 'admin_updated_repo': (
116 _('[updated] repository'),
117 None, 'icon-pencil icon-pencil-colored'),
118 'admin_created_user': (
119 _('[created] user'),
120 self.get_user_name, 'icon-user icon-user-colored'),
121 'admin_updated_user': (
122 _('[updated] user'),
123 self.get_user_name, 'icon-user icon-user-colored'),
124 'admin_created_users_group': (
125 _('[created] user group'),
126 self.get_users_group, 'icon-pencil icon-pencil-colored'),
127 'admin_updated_users_group': (
128 _('[updated] user group'),
129 self.get_users_group, 'icon-pencil icon-pencil-colored'),
130 'user_commented_revision': (
131 _('[commented] on commit in repository'),
132 self.get_cs_links, 'icon-comment icon-comment-colored'),
133 'user_commented_pull_request': (
134 _('[commented] on pull request for'),
135 self.get_pull_request, 'icon-comment icon-comment-colored'),
136 'user_closed_pull_request': (
137 _('[closed] pull request for'),
138 self.get_pull_request, 'icon-check'),
139 'user_merged_pull_request': (
140 _('[merged] pull request for'),
141 self.get_pull_request, 'icon-check'),
142 'push': (
143 _('[pushed] into'),
144 self.get_cs_links, 'icon-arrow-up'),
145 'push_local': (
146 _('[committed via RhodeCode] into repository'),
147 self.get_cs_links, 'icon-pencil icon-pencil-colored'),
148 'push_remote': (
149 _('[pulled from remote] into repository'),
150 self.get_cs_links, 'icon-arrow-up'),
151 'pull': (
152 _('[pulled] from'),
153 None, 'icon-arrow-down'),
154 'started_following_repo': (
155 _('[started following] repository'),
156 None, 'icon-heart icon-heart-colored'),
157 'stopped_following_repo': (
158 _('[stopped following] repository'),
159 None, 'icon-heart-empty icon-heart-colored'),
160 }
161 return action_map
162
163 def get_fork_name(self):
+164 from rhodecode.lib import helpers as h
165 repo_name = self.action_params
-165 _url = url('summary_home', repo_name=repo_name)
+166 _url = h.route_path('repo_summary', repo_name=repo_name)
167 return _('fork name %s') % link_to(self.action_params, _url)
168
169 def get_user_name(self):
170 user_name = self.action_params
171 return user_name
172
173 def get_users_group(self):
174 group_name = self.action_params
175 return group_name
176
177 def get_pull_request(self):
178 pull_request_id = self.action_params
179 if self.is_deleted():
180 repo_name = self.user_log.repository_name
181 else:
182 repo_name = self.user_log.repository.repo_name
183 return link_to(
184 _('Pull request #%s') % pull_request_id,
185 url('pullrequest_show', repo_name=repo_name,
186 pull_request_id=pull_request_id))
187
188 def get_archive_name(self):
189 archive_name = self.action_params
190 return archive_name
191
192 def action_parser_icon(self):
193 tmpl = """<i class="%s" alt="%s"></i>"""
194 ico = self.action_map.get(self.action, ['', '', ''])[2]
195 return literal(tmpl % (ico, self.action))
196
197 def get_cs_links(self):
198 if self.is_deleted():
199 return self.action_params
200
201 repo_name = self.user_log.repository.repo_name
202 commit_ids = self.action_params.split(',')
203 commits = self.get_commits(commit_ids)
204
205 link_generator = (
206 self.lnk(commit, repo_name)
207 for commit in commits[:self.commits_limit])
208 commit_links = [" " + ', '.join(link_generator)]
209 _op1, _name1 = _get_op(commit_ids[0])
210 _op2, _name2 = _get_op(commit_ids[-1])
211
212 commit_id_range = '%s...%s' % (_name1, _name2)
213
214 compare_view = (
215 ' <div class="compare_view tooltip" title="%s">'
216 '<a href="%s">%s</a> </div>' % (
217 _('Show all combined commits %s->%s') % (
218 commit_ids[0][:12], commit_ids[-1][:12]
219 ),
220 url('changeset_home', repo_name=repo_name,
221 revision=commit_id_range), _('compare view')
222 )
223 )
224
225 if len(commit_ids) > self.commits_limit:
226 more_count = len(commit_ids) - self.commits_limit
227 commit_links.append(
228 _(' and %(num)s more commits') % {'num': more_count}
229 )
230
231 if len(commits) > 1:
232 commit_links.append(compare_view)
233 return ''.join(commit_links)
234
235 def get_commits(self, commit_ids):
236 commits = []
237 if not filter(lambda v: v != '', commit_ids):
238 return commits
239
240 repo = None
241 if self.parse_commits:
242 repo = self.user_log.repository.scm_instance()
243
244 for commit_id in commit_ids[:self.commits_top_limit]:
245 _op, _name = _get_op(commit_id)
246
247 # we want parsed commits, or new log store format is bad
248 if self.parse_commits:
249 try:
250 commit = repo.get_commit(commit_id=commit_id)
251 commits.append(commit)
252 except CommitDoesNotExistError:
253 log.error(
254 'cannot find commit id %s in this repository',
255 commit_id)
256 commits.append(commit_id)
257 continue
258 else:
259 fake_commit = AttributeDict({
260 'short_id': commit_id[:12],
261 'raw_id': commit_id,
262 'message': '',
263 'op': _op,
264 'ref_name': _name
265 })
266 commits.append(fake_commit)
267
268 return commits
269
270 def lnk(self, commit_or_id, repo_name):
271 from rhodecode.lib.helpers import tooltip
272
273 if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
274 lazy_cs = True
275 if (getattr(commit_or_id, 'op', None) and
276 getattr(commit_or_id, 'ref_name', None)):
277 lazy_cs = False
278 lbl = '?'
279 if commit_or_id.op == 'delete_branch':
280 lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
281 title = ''
282 elif commit_or_id.op == 'tag':
283 lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
284 title = ''
285 _url = '#'
286
287 else:
288 lbl = '%s' % (commit_or_id.short_id[:8])
289 _url = url('changeset_home', repo_name=repo_name,
290 revision=commit_or_id.raw_id)
291 title = tooltip(commit_or_id.message)
292 else:
293 # commit cannot be found/striped/removed etc.
294 lbl = ('%s' % commit_or_id)[:12]
295 _url = '#'
296 title = _('Commit not found')
297 if self.parse_commits:
298 return link_to(lbl, _url, title=title, class_='tooltip')
299 return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
300 class_='lazy-cs' if lazy_cs else '')
301
302 def is_deleted(self):
303 return self.user_log.repository is None
304
305
306 def _no_params_func():
307 return ""
308
309
310 def _get_op(commit_id):
311 _op = None
312 _name = commit_id
313 if len(commit_id.split('=>')) == 2:
314 _op, _name = commit_id.split('=>')
315 return _op, _name
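A short, hedged sketch of the `op=>name` token convention that `_get_op` and `get_cs_links` rely on. The helper name `split_push_tokens` and the sample values are made up for illustration.

    # Each entry in a push action's params is either a raw commit id or an
    # "<op>=><ref_name>" token, e.g. 'tag=>v1.2.0' or 'delete_branch=>feature-x'.
    def split_push_tokens(action_params):
        return [_get_op(token) for token in action_params.split(',')]

    # split_push_tokens('deadbeefcafe,tag=>v1.2.0,delete_branch=>feature-x')
    # -> [(None, 'deadbeefcafe'), ('tag', 'v1.2.0'), ('delete_branch', 'feature-x')]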
@@ -1,2007 +1,2007 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 authentication and permission libraries
23 """
24
25 import os
26 import inspect
27 import collections
28 import fnmatch
29 import hashlib
30 import itertools
31 import logging
32 import random
33 import traceback
34 from functools import wraps
35
36 import ipaddress
37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound
38 from pylons import url, request
39 from pylons.controllers.util import abort, redirect
40 from pylons.i18n.translation import _
41 from sqlalchemy.orm.exc import ObjectDeletedError
42 from sqlalchemy.orm import joinedload
43 from zope.cachedescriptors.property import Lazy as LazyProperty
44
45 import rhodecode
46 from rhodecode.model import meta
47 from rhodecode.model.meta import Session
48 from rhodecode.model.user import UserModel
49 from rhodecode.model.db import (
50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 UserIpMap, UserApiKeys, RepoGroup)
52 from rhodecode.lib import caches
53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 from rhodecode.lib.utils import (
55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 from rhodecode.lib.caching_query import FromCache
57
58
59 if rhodecode.is_unix:
60 import bcrypt
61
62 log = logging.getLogger(__name__)
63
64 csrf_token_key = "csrf_token"
65
66
67 class PasswordGenerator(object):
68 """
69 This is a simple class for generating password from different sets of
70 characters
71 usage::
72
73 passwd_gen = PasswordGenerator()
74 #print 8-letter password containing only big and small letters
75 of alphabet
76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
77 """
78 ALPHABETS_NUM = r'''1234567890'''
79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
88
89 def __init__(self, passwd=''):
90 self.passwd = passwd
91
92 def gen_password(self, length, type_=None):
93 if type_ is None:
94 type_ = self.ALPHABETS_FULL
95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
96 return self.passwd
97
98
99 class _RhodeCodeCryptoBase(object):
100 ENC_PREF = None
101
102 def hash_create(self, str_):
103 """
104 hash the string using
105
106 :param str_: password to hash
107 """
108 raise NotImplementedError
109
110 def hash_check_with_upgrade(self, password, hashed):
111 """
112 Returns tuple in which first element is boolean that states that
113 given password matches it's hashed version, and the second is new hash
114 of the password, in case this password should be migrated to new
115 cipher.
116 """
117 checked_hash = self.hash_check(password, hashed)
118 return checked_hash, None
119
120 def hash_check(self, password, hashed):
121 """
122 Checks matching password with it's hashed value.
123
124 :param password: password
125 :param hashed: password in hashed form
126 """
127 raise NotImplementedError
128
129 def _assert_bytes(self, value):
130 """
131 Passing in an `unicode` object can lead to hard to detect issues
132 if passwords contain non-ascii characters. Doing a type check
133 during runtime, so that such mistakes are detected early on.
134 """
135 if not isinstance(value, str):
136 raise TypeError(
137 "Bytestring required as input, got %r." % (value, ))
138
139
140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
141 ENC_PREF = ('$2a$10', '$2b$10')
142
143 def hash_create(self, str_):
144 self._assert_bytes(str_)
145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
146
147 def hash_check_with_upgrade(self, password, hashed):
148 """
149 Returns tuple in which first element is boolean that states that
150 given password matches it's hashed version, and the second is new hash
151 of the password, in case this password should be migrated to new
152 cipher.
153
154 This implements special upgrade logic which works like that:
155 - check if the given password == bcrypted hash, if yes then we
156 properly used password and it was already in bcrypt. Proceed
157 without any changes
158 - if bcrypt hash check is not working try with sha256. If hash compare
159 is ok, it means we using correct but old hashed password. indicate
160 hash change and proceed
161 """
162
163 new_hash = None
164
165 # regular pw check
166 password_match_bcrypt = self.hash_check(password, hashed)
167
168 # now we want to know if the password was maybe from sha256
169 # basically calling _RhodeCodeCryptoSha256().hash_check()
170 if not password_match_bcrypt:
171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
172 new_hash = self.hash_create(password) # make new bcrypt hash
173 password_match_bcrypt = True
174
175 return password_match_bcrypt, new_hash
176
177 def hash_check(self, password, hashed):
178 """
179 Checks matching password with it's hashed value.
180
181 :param password: password
182 :param hashed: password in hashed form
183 """
184 self._assert_bytes(password)
185 try:
186 return bcrypt.hashpw(password, hashed) == hashed
187 except ValueError as e:
188 # we're having a invalid salt here probably, we should not crash
189 # just return with False as it would be a wrong password.
190 log.debug('Failed to check password hash using bcrypt %s',
191 safe_str(e))
192
193 return False
194
195
196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
197 ENC_PREF = '_'
198
199 def hash_create(self, str_):
200 self._assert_bytes(str_)
201 return hashlib.sha256(str_).hexdigest()
202
203 def hash_check(self, password, hashed):
204 """
205 Checks matching password with it's hashed value.
206
207 :param password: password
208 :param hashed: password in hashed form
209 """
210 self._assert_bytes(password)
211 return hashlib.sha256(password).hexdigest() == hashed
212
213
214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
215 ENC_PREF = '_'
216
217 def hash_create(self, str_):
218 self._assert_bytes(str_)
219 return hashlib.md5(str_).hexdigest()
220
221 def hash_check(self, password, hashed):
222 """
223 Checks matching password with it's hashed value.
224
225 :param password: password
226 :param hashed: password in hashed form
227 """
228 self._assert_bytes(password)
229 return hashlib.md5(password).hexdigest() == hashed
230
231
232 def crypto_backend():
233 """
234 Return the matching crypto backend.
235
236 Selection is based on if we run tests or not, we pick md5 backend to run
237 tests faster since BCRYPT is expensive to calculate
238 """
239 if rhodecode.is_test:
240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
241 else:
242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
243
244 return RhodeCodeCrypto
245
246
247 def get_crypt_password(password):
248 """
249 Create the hash of `password` with the active crypto backend.
250
251 :param password: The cleartext password.
252 :type password: unicode
253 """
254 password = safe_str(password)
255 return crypto_backend().hash_create(password)
256
257
258 def check_password(password, hashed):
259 """
260 Check if the value in `password` matches the hash in `hashed`.
261
262 :param password: The cleartext password.
263 :type password: unicode
264
265 :param hashed: The expected hashed version of the password.
266 :type hashed: The hash has to be passed in in text representation.
267 """
268 password = safe_str(password)
269 return crypto_backend().hash_check(password, hashed)
270
271
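A hedged usage sketch of the hashing helpers above (not part of the diff itself). It assumes a non-test, Unix install so the bcrypt backend is active, and Python 2 byte strings as in the rest of this module.

    # Round trip through the active backend.
    hashed = get_crypt_password('s3cret')          # bcrypt outside tests, md5 under is_test
    assert check_password('s3cret', hashed)

    # Transparent upgrade path: an old sha256 hash still verifies, and the
    # bcrypt backend hands back a fresh bcrypt hash for the caller to store.
    matches, new_hash = _RhodeCodeCryptoBCrypt().hash_check_with_upgrade(
        's3cret', hashlib.sha256('s3cret').hexdigest())
    # matches is True; new_hash is a new bcrypt hash (None when no migration is needed).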
272 def generate_auth_token(data, salt=None):
273 """
274 Generates API KEY from given string
275 """
276
277 if salt is None:
278 salt = os.urandom(16)
279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
280
281
282 class CookieStoreWrapper(object):
283
284 def __init__(self, cookie_store):
285 self.cookie_store = cookie_store
286
287 def __repr__(self):
288 return 'CookieStore<%s>' % (self.cookie_store)
289
290 def get(self, key, other=None):
291 if isinstance(self.cookie_store, dict):
292 return self.cookie_store.get(key, other)
293 elif isinstance(self.cookie_store, AuthUser):
294 return self.cookie_store.__dict__.get(key, other)
295
296
297 def _cached_perms_data(user_id, scope, user_is_admin,
298 user_inherit_default_permissions, explicit, algo):
299
300 permissions = PermissionCalculator(
301 user_id, scope, user_is_admin, user_inherit_default_permissions,
302 explicit, algo)
303 return permissions.calculate()
304
305 class PermOrigin:
306 ADMIN = 'superadmin'
307
308 REPO_USER = 'user:%s'
309 REPO_USERGROUP = 'usergroup:%s'
310 REPO_OWNER = 'repo.owner'
311 REPO_DEFAULT = 'repo.default'
312 REPO_PRIVATE = 'repo.private'
313
314 REPOGROUP_USER = 'user:%s'
315 REPOGROUP_USERGROUP = 'usergroup:%s'
316 REPOGROUP_OWNER = 'group.owner'
317 REPOGROUP_DEFAULT = 'group.default'
318
319 USERGROUP_USER = 'user:%s'
320 USERGROUP_USERGROUP = 'usergroup:%s'
321 USERGROUP_OWNER = 'usergroup.owner'
322 USERGROUP_DEFAULT = 'usergroup.default'
323
324
325 class PermOriginDict(dict):
326 """
327 A special dict used for tracking permissions along with their origins.
328
329 `__setitem__` has been overridden to expect a tuple(perm, origin)
330 `__getitem__` will return only the perm
331 `.perm_origin_stack` will return the stack of (perm, origin) set per key
332
333 >>> perms = PermOriginDict()
334 >>> perms['resource'] = 'read', 'default'
335 >>> perms['resource']
336 'read'
337 >>> perms['resource'] = 'write', 'admin'
338 >>> perms['resource']
339 'write'
340 >>> perms.perm_origin_stack
341 {'resource': [('read', 'default'), ('write', 'admin')]}
342 """
343
344
345 def __init__(self, *args, **kw):
346 dict.__init__(self, *args, **kw)
347 self.perm_origin_stack = {}
348
349 def __setitem__(self, key, (perm, origin)):
350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
351 dict.__setitem__(self, key, perm)
352
353
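A hedged aside: the `__setitem__(self, key, (perm, origin))` signature above relies on Python 2 tuple parameter unpacking, which PEP 3113 removed. A rough Python 3 rendering of the same idea (illustrative class name, not part of this changeset) would unpack explicitly:

    class PermOriginDictPy3(dict):
        def __init__(self, *args, **kw):
            super(PermOriginDictPy3, self).__init__(*args, **kw)
            self.perm_origin_stack = {}

        def __setitem__(self, key, perm_and_origin):
            # Store (perm, origin) history per key, expose only perm via lookup.
            perm, origin = perm_and_origin
            self.perm_origin_stack.setdefault(key, []).append((perm, origin))
            super(PermOriginDictPy3, self).__setitem__(key, perm)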
354 class PermissionCalculator(object):
354 class PermissionCalculator(object):
355
355
356 def __init__(
356 def __init__(
357 self, user_id, scope, user_is_admin,
357 self, user_id, scope, user_is_admin,
358 user_inherit_default_permissions, explicit, algo):
358 user_inherit_default_permissions, explicit, algo):
359 self.user_id = user_id
359 self.user_id = user_id
360 self.user_is_admin = user_is_admin
360 self.user_is_admin = user_is_admin
361 self.inherit_default_permissions = user_inherit_default_permissions
361 self.inherit_default_permissions = user_inherit_default_permissions
362 self.explicit = explicit
362 self.explicit = explicit
363 self.algo = algo
363 self.algo = algo
364
364
365 scope = scope or {}
365 scope = scope or {}
366 self.scope_repo_id = scope.get('repo_id')
366 self.scope_repo_id = scope.get('repo_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
369
369
370 self.default_user_id = User.get_default_user(cache=True).user_id
370 self.default_user_id = User.get_default_user(cache=True).user_id
371
371
372 self.permissions_repositories = PermOriginDict()
372 self.permissions_repositories = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
375 self.permissions_global = set()
375 self.permissions_global = set()
376
376
377 self.default_repo_perms = Permission.get_default_repo_perms(
377 self.default_repo_perms = Permission.get_default_repo_perms(
378 self.default_user_id, self.scope_repo_id)
378 self.default_user_id, self.scope_repo_id)
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 self.default_user_id, self.scope_repo_group_id)
380 self.default_user_id, self.scope_repo_group_id)
381 self.default_user_group_perms = \
381 self.default_user_group_perms = \
382 Permission.get_default_user_group_perms(
382 Permission.get_default_user_group_perms(
383 self.default_user_id, self.scope_user_group_id)
383 self.default_user_id, self.scope_user_group_id)
384
384
385 def calculate(self):
385 def calculate(self):
386 if self.user_is_admin:
386 if self.user_is_admin:
387 return self._admin_permissions()
387 return self._admin_permissions()
388
388
389 self._calculate_global_default_permissions()
389 self._calculate_global_default_permissions()
390 self._calculate_global_permissions()
390 self._calculate_global_permissions()
391 self._calculate_default_permissions()
391 self._calculate_default_permissions()
392 self._calculate_repository_permissions()
392 self._calculate_repository_permissions()
393 self._calculate_repository_group_permissions()
393 self._calculate_repository_group_permissions()
394 self._calculate_user_group_permissions()
394 self._calculate_user_group_permissions()
395 return self._permission_structure()
395 return self._permission_structure()
396
396
397 def _admin_permissions(self):
397 def _admin_permissions(self):
398 """
398 """
399 admin user have all default rights for repositories
399 admin user have all default rights for repositories
400 and groups set to admin
400 and groups set to admin
401 """
401 """
402 self.permissions_global.add('hg.admin')
402 self.permissions_global.add('hg.admin')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404
404
405 # repositories
405 # repositories
406 for perm in self.default_repo_perms:
406 for perm in self.default_repo_perms:
407 r_k = perm.UserRepoToPerm.repository.repo_name
407 r_k = perm.UserRepoToPerm.repository.repo_name
408 p = 'repository.admin'
408 p = 'repository.admin'
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410
410
411 # repository groups
411 # repository groups
412 for perm in self.default_repo_groups_perms:
412 for perm in self.default_repo_groups_perms:
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 p = 'group.admin'
414 p = 'group.admin'
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416
416
417 # user groups
417 # user groups
418 for perm in self.default_user_group_perms:
418 for perm in self.default_user_group_perms:
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 p = 'usergroup.admin'
420 p = 'usergroup.admin'
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422
422
423 return self._permission_structure()
423 return self._permission_structure()
424
424
425 def _calculate_global_default_permissions(self):
425 def _calculate_global_default_permissions(self):
426 """
426 """
427 global permissions taken from the default user
427 global permissions taken from the default user
428 """
428 """
429 default_global_perms = UserToPerm.query()\
429 default_global_perms = UserToPerm.query()\
430 .filter(UserToPerm.user_id == self.default_user_id)\
430 .filter(UserToPerm.user_id == self.default_user_id)\
431 .options(joinedload(UserToPerm.permission))
431 .options(joinedload(UserToPerm.permission))
432
432
433 for perm in default_global_perms:
433 for perm in default_global_perms:
434 self.permissions_global.add(perm.permission.permission_name)
434 self.permissions_global.add(perm.permission.permission_name)
435
435
436 def _calculate_global_permissions(self):
436 def _calculate_global_permissions(self):
437 """
437 """
438 Set global system permissions with user permissions or permissions
438 Set global system permissions with user permissions or permissions
439 taken from the user groups of the current user.
439 taken from the user groups of the current user.
440
440
441 The permissions include repo creating, repo group creating, forking
441 The permissions include repo creating, repo group creating, forking
442 etc.
442 etc.
443 """
443 """
444
444
445 # now we read the defined permissions and overwrite what we have set
445 # now we read the defined permissions and overwrite what we have set
446 # before those can be configured from groups or users explicitly.
446 # before those can be configured from groups or users explicitly.
447
448 # TODO: johbo: This seems to be out of sync, find out the reason
449 # for the comment below and update it.
450
451 # In case we want to extend this list we should always be in sync with
452 # User.DEFAULT_USER_PERMISSIONS definitions
453 _configurable = frozenset([
454 'hg.fork.none', 'hg.fork.repository',
455 'hg.create.none', 'hg.create.repository',
456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 'hg.create.write_on_repogroup.false',
459 'hg.create.write_on_repogroup.true',
460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 ])
462
463 # USER GROUPS come first: user group global permissions
464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 .options(joinedload(UserGroupToPerm.permission))\
466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 UserGroupMember.users_group_id))\
468 .filter(UserGroupMember.user_id == self.user_id)\
469 .order_by(UserGroupToPerm.users_group_id)\
470 .all()
471
472 # need to group here by groups since user can be in more than
473 # one group, so we get all groups
474 _explicit_grouped_perms = [
475 [x, list(y)] for x, y in
476 itertools.groupby(user_perms_from_users_groups,
477 lambda _x: _x.users_group)]
478
479 for gr, perms in _explicit_grouped_perms:
480 # since user can be in multiple groups iterate over them and
481 # select the lowest permissions first (more explicit)
482 # TODO: marcink: do this^^
483
484 # group doesn't inherit default permissions so we actually set them
485 if not gr.inherit_default_permissions:
486 # NEED TO IGNORE all previously set configurable permissions
487 # and replace them with the permissions explicitly set from this
488 # user group
489 self.permissions_global = self.permissions_global.difference(
490 _configurable)
491 for perm in perms:
492 self.permissions_global.add(perm.permission.permission_name)
493
494 # user explicit global permissions
495 user_perms = Session().query(UserToPerm)\
496 .options(joinedload(UserToPerm.permission))\
497 .filter(UserToPerm.user_id == self.user_id).all()
498
499 if not self.inherit_default_permissions:
500 # NEED TO IGNORE all configurable permissions and
501 # replace them with the permissions explicitly set for this user
502 self.permissions_global = self.permissions_global.difference(
503 _configurable)
504 for perm in user_perms:
505 self.permissions_global.add(perm.permission.permission_name)
506
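The block above strips every configurable global permission before re-adding the ones a non-inheriting user group (or user) sets explicitly. A minimal standalone sketch of that set arithmetic, with illustrative permission names that are not necessarily the shipped defaults:

# illustrative sketch only; permission names are example values
defaults = set(['hg.create.repository', 'hg.fork.repository',
                'hg.register.manual_activate'])
configurable = set(['hg.create.repository', 'hg.create.none',
                    'hg.fork.repository', 'hg.fork.none'])
explicit = set(['hg.create.none', 'hg.fork.none'])

perms = defaults.difference(configurable)   # drop all configurable defaults
for name in explicit:                       # keep only the explicit choices
    perms.add(name)
assert perms == set(['hg.register.manual_activate',
                     'hg.create.none', 'hg.fork.none'])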
507 def _calculate_default_permissions(self):
508 """
509 Set default user permissions for repositories, repository groups
510 taken from the default user.
511
512 Calculate inheritance of object permissions based on what we have now
513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 explicitly set. Inherit is the opposite of .false being there.
515
516 .. note::
517
518 the syntax is a little bit odd, but what we need to check here is
519 the opposite of the .false permission being in the list, so even in
520 an inconsistent state where both .true/.false are there,
521 .false is more important
522
523 """
524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 in self.permissions_global)
526
527 # defaults for repositories, taken from `default` user permissions
528 # on given repo
529 for perm in self.default_repo_perms:
530 r_k = perm.UserRepoToPerm.repository.repo_name
531 o = PermOrigin.REPO_DEFAULT
532 if perm.Repository.private and not (
533 perm.Repository.user_id == self.user_id):
534 # disable defaults for private repos,
535 p = 'repository.none'
536 o = PermOrigin.REPO_PRIVATE
537 elif perm.Repository.user_id == self.user_id:
538 # set admin if owner
539 p = 'repository.admin'
540 o = PermOrigin.REPO_OWNER
541 else:
542 p = perm.Permission.permission_name
543 # if we decide this user isn't inheriting permissions from
544 # default user we set him to .none so only explicit
545 # permissions work
546 if not user_inherit_object_permissions:
547 p = 'repository.none'
548 self.permissions_repositories[r_k] = p, o
549
550 # defaults for repository groups taken from `default` user permission
551 # on given group
552 for perm in self.default_repo_groups_perms:
553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 o = PermOrigin.REPOGROUP_DEFAULT
555 if perm.RepoGroup.user_id == self.user_id:
556 # set admin if owner
557 p = 'group.admin'
558 o = PermOrigin.REPOGROUP_OWNER
559 else:
560 p = perm.Permission.permission_name
561
562 # if we decide this user isn't inheriting permissions from default
563 # user we set him to .none so only explicit permissions work
564 if not user_inherit_object_permissions:
565 p = 'group.none'
566 self.permissions_repository_groups[rg_k] = p, o
567
568 # defaults for user groups taken from `default` user permission
569 # on given user group
570 for perm in self.default_user_group_perms:
571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 o = PermOrigin.USERGROUP_DEFAULT
573 if perm.UserGroup.user_id == self.user_id:
574 # set admin if owner
575 p = 'usergroup.admin'
576 o = PermOrigin.USERGROUP_OWNER
577 else:
578 p = perm.Permission.permission_name
579
580 # if we decide this user isn't inheriting permissions from default
581 # user we set him to .none so only explicit permissions work
582 if not user_inherit_object_permissions:
583 p = 'usergroup.none'
584 self.permissions_user_groups[u_k] = p, o
585
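A tiny sketch of the `.false`-wins rule described in the note of `_calculate_default_permissions`: inheritance is granted only when the explicit `.false` permission is absent, even in an inconsistent state where both values appear.

# illustrative sketch of the inheritance check
permissions_global = set(['hg.inherit_default_perms.true',
                          'hg.inherit_default_perms.false'])  # inconsistent state
user_inherit_object_permissions = (
    'hg.inherit_default_perms.false' not in permissions_global)
assert user_inherit_object_permissions is False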
586 def _calculate_repository_permissions(self):
587 """
588 Repository permissions for the current user.
589
590 Check if the user is part of user groups for this repository and
591 fill in the permission from it. `_choose_permission` decides which
592 permission should be selected based on the selected method.
593 """
594
595 # user group for repositories permissions
596 user_repo_perms_from_user_group = Permission\
597 .get_default_repo_perms_from_user_group(
598 self.user_id, self.scope_repo_id)
599
600 multiple_counter = collections.defaultdict(int)
601 for perm in user_repo_perms_from_user_group:
602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 multiple_counter[r_k] += 1
605 p = perm.Permission.permission_name
606 o = PermOrigin.REPO_USERGROUP % ug_k
607
608 if perm.Repository.user_id == self.user_id:
609 # set admin if owner
610 p = 'repository.admin'
611 o = PermOrigin.REPO_OWNER
612 else:
613 if multiple_counter[r_k] > 1:
614 cur_perm = self.permissions_repositories[r_k]
615 p = self._choose_permission(p, cur_perm)
616 self.permissions_repositories[r_k] = p, o
617
618 # user explicit permissions for repositories, overrides any specified
619 # by the group permission
620 user_repo_perms = Permission.get_default_repo_perms(
621 self.user_id, self.scope_repo_id)
622 for perm in user_repo_perms:
623 r_k = perm.UserRepoToPerm.repository.repo_name
624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 # set admin if owner
626 if perm.Repository.user_id == self.user_id:
627 p = 'repository.admin'
628 o = PermOrigin.REPO_OWNER
629 else:
630 p = perm.Permission.permission_name
631 if not self.explicit:
632 cur_perm = self.permissions_repositories.get(
633 r_k, 'repository.none')
634 p = self._choose_permission(p, cur_perm)
635 self.permissions_repositories[r_k] = p, o
636
637 def _calculate_repository_group_permissions(self):
638 """
639 Repository group permissions for the current user.
640
641 Check if the user is part of user groups for repository groups and
642 fill in the permissions from it. `_choose_permission` decides which
643 permission should be selected based on the selected method.
644 """
645 # user group for repo groups permissions
646 user_repo_group_perms_from_user_group = Permission\
647 .get_default_group_perms_from_user_group(
648 self.user_id, self.scope_repo_group_id)
649
650 multiple_counter = collections.defaultdict(int)
651 for perm in user_repo_group_perms_from_user_group:
652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 multiple_counter[g_k] += 1
656 p = perm.Permission.permission_name
657 if perm.RepoGroup.user_id == self.user_id:
658 # set admin if owner, even for member of other user group
659 p = 'group.admin'
660 o = PermOrigin.REPOGROUP_OWNER
661 else:
662 if multiple_counter[g_k] > 1:
663 cur_perm = self.permissions_repository_groups[g_k]
664 p = self._choose_permission(p, cur_perm)
665 self.permissions_repository_groups[g_k] = p, o
666
667 # user explicit permissions for repository groups
668 user_repo_groups_perms = Permission.get_default_group_perms(
669 self.user_id, self.scope_repo_group_id)
670 for perm in user_repo_groups_perms:
671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 u_k = perm.UserRepoGroupToPerm.user.username
673 o = PermOrigin.REPOGROUP_USER % u_k
674
675 if perm.RepoGroup.user_id == self.user_id:
676 # set admin if owner
677 p = 'group.admin'
678 o = PermOrigin.REPOGROUP_OWNER
679 else:
680 p = perm.Permission.permission_name
681 if not self.explicit:
682 cur_perm = self.permissions_repository_groups.get(
683 rg_k, 'group.none')
684 p = self._choose_permission(p, cur_perm)
685 self.permissions_repository_groups[rg_k] = p, o
686
687 def _calculate_user_group_permissions(self):
688 """
689 User group permissions for the current user.
690 """
691 # user group for user group permissions
692 user_group_from_user_group = Permission\
693 .get_default_user_group_perms_from_user_group(
694 self.user_id, self.scope_user_group_id)
695
696 multiple_counter = collections.defaultdict(int)
697 for perm in user_group_from_user_group:
698 g_k = perm.UserGroupUserGroupToPerm\
699 .target_user_group.users_group_name
700 u_k = perm.UserGroupUserGroupToPerm\
701 .user_group.users_group_name
702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 multiple_counter[g_k] += 1
704 p = perm.Permission.permission_name
705
706 if perm.UserGroup.user_id == self.user_id:
707 # set admin if owner, even for member of other user group
708 p = 'usergroup.admin'
709 o = PermOrigin.USERGROUP_OWNER
710 else:
711 if multiple_counter[g_k] > 1:
712 cur_perm = self.permissions_user_groups[g_k]
713 p = self._choose_permission(p, cur_perm)
714 self.permissions_user_groups[g_k] = p, o
715
716 # user explicit permission for user groups
717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 self.user_id, self.scope_user_group_id)
719 for perm in user_user_groups_perms:
720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 u_k = perm.UserUserGroupToPerm.user.username
722 o = PermOrigin.USERGROUP_USER % u_k
723
724 if perm.UserGroup.user_id == self.user_id:
725 # set admin if owner
726 p = 'usergroup.admin'
727 o = PermOrigin.USERGROUP_OWNER
728 else:
729 p = perm.Permission.permission_name
730 if not self.explicit:
731 cur_perm = self.permissions_user_groups.get(
732 ug_k, 'usergroup.none')
733 p = self._choose_permission(p, cur_perm)
734 self.permissions_user_groups[ug_k] = p, o
735
736 def _choose_permission(self, new_perm, cur_perm):
737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 if self.algo == 'higherwin':
740 if new_perm_val > cur_perm_val:
741 return new_perm
742 return cur_perm
743 elif self.algo == 'lowerwin':
744 if new_perm_val < cur_perm_val:
745 return new_perm
746 return cur_perm
747
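A small self-contained sketch of the two conflict-resolution strategies in `_choose_permission`; the weight values below are assumptions standing in for `Permission.PERM_WEIGHTS` (higher value means more access).

# illustrative sketch with assumed weights
weights = {'repository.none': 0, 'repository.read': 1,
           'repository.write': 3, 'repository.admin': 4}

def choose(new_perm, cur_perm, algo='higherwin'):
    if algo == 'higherwin':
        return new_perm if weights[new_perm] > weights[cur_perm] else cur_perm
    return new_perm if weights[new_perm] < weights[cur_perm] else cur_perm

assert choose('repository.write', 'repository.read') == 'repository.write'
assert choose('repository.write', 'repository.read',
              algo='lowerwin') == 'repository.read'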
748 def _permission_structure(self):
749 return {
750 'global': self.permissions_global,
751 'repositories': self.permissions_repositories,
752 'repositories_groups': self.permissions_repository_groups,
753 'user_groups': self.permissions_user_groups,
754 }
755
756
757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
758 """
759 Check if given controller_name is in whitelist of auth token access
760 """
761 if not whitelist:
762 from rhodecode import CONFIG
763 whitelist = aslist(
764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 log.debug(
766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767
768 auth_token_access_valid = False
769 for entry in whitelist:
770 if fnmatch.fnmatch(controller_name, entry):
771 auth_token_access_valid = True
772 break
773
774 if auth_token_access_valid:
775 log.debug('controller:%s matches entry in whitelist'
776 % (controller_name,))
777 else:
778 msg = ('controller: %s does *NOT* match any entry in whitelist'
779 % (controller_name,))
780 if auth_token:
781 # if we use auth token key and don't have access it's a warning
782 log.warning(msg)
783 else:
784 log.debug(msg)
785
786 return auth_token_access_valid
787
788
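A short sketch of the whitelist matching in `allowed_auth_token_access`: entries from the `api_access_controllers_whitelist` setting are treated as fnmatch patterns against the "Controller:action" location string. The entries below are made-up examples, not the shipped defaults.

# illustrative sketch; the whitelist entries are example values
import fnmatch

whitelist = ['ChangesetController:changeset_raw', 'FilesController:*']

def token_access_allowed(location):
    return any(fnmatch.fnmatch(location, entry) for entry in whitelist)

assert token_access_allowed('FilesController:raw')
assert not token_access_allowed('SummaryController:index')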
789 class AuthUser(object):
790 """
791 A simple object that handles all attributes of user in RhodeCode
792
793 It does lookup based on API key, given user, or user present in the session.
794 Then it fills all required information for such user. It also checks if
795 anonymous access is enabled and if so, it returns the default user as logged in
796 """
797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798
799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800
801 self.user_id = user_id
802 self._api_key = api_key
803
804 self.api_key = None
805 self.feed_token = ''
806 self.username = username
807 self.ip_addr = ip_addr
808 self.name = ''
809 self.lastname = ''
810 self.email = ''
811 self.is_authenticated = False
812 self.admin = False
813 self.inherit_default_permissions = False
814 self.password = ''
815
816 self.anonymous_user = None # propagated on propagate_data
817 self.propagate_data()
818 self._instance = None
819 self._permissions_scoped_cache = {} # used to bind scoped calculation
820
821 @LazyProperty
822 def permissions(self):
823 return self.get_perms(user=self, cache=False)
824
825 def permissions_with_scope(self, scope):
826 """
827 Call the get_perms function with scoped data. The scope in that function
828 narrows the SQL calls to the given IDs of objects, resulting in fetching
829 just the particular permission we want to obtain. If scope is an empty dict
830 then it basically narrows the scope to GLOBAL permissions only.
831
832 :param scope: dict
833 """
834 if 'repo_name' in scope:
835 obj = Repository.get_by_repo_name(scope['repo_name'])
836 if obj:
837 scope['repo_id'] = obj.repo_id
838 _scope = {
839 'repo_id': -1,
840 'user_group_id': -1,
841 'repo_group_id': -1,
842 }
843 _scope.update(scope)
844 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
845 _scope.items())))
846 if cache_key not in self._permissions_scoped_cache:
847 # store in cache to mimic how the @LazyProperty works,
848 # the difference here is that we use the unique key calculated
849 # from params and values
850 res = self.get_perms(user=self, cache=False, scope=_scope)
851 self._permissions_scoped_cache[cache_key] = res
852 return self._permissions_scoped_cache[cache_key]
853
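A hedged usage sketch of `permissions_with_scope`; the `auth_user` instance and repository name are hypothetical, and the calls require a configured database session, so they are shown as comments only.

# illustrative usage (requires a live RhodeCode environment):
# auth_user = AuthUser(user_id=some_user_id)
#
# narrow the calculation to a single repository:
# scoped = auth_user.permissions_with_scope({'repo_name': 'group/my-repo'})
# scoped['repositories'].get('group/my-repo')
#
# an empty scope computes GLOBAL permissions only:
# global_only = auth_user.permissions_with_scope({})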
854 def get_instance(self):
855 return User.get(self.user_id)
856
857 def update_lastactivity(self):
858 if self.user_id:
859 User.get(self.user_id).update_lastactivity()
860
861 def propagate_data(self):
862 """
863 Fills in user data and propagates values to this instance. Maps fetched
864 user attributes to this class instance attributes
865 """
866 log.debug('starting data propagation for new potential AuthUser')
867 user_model = UserModel()
868 anon_user = self.anonymous_user = User.get_default_user(cache=True)
869 is_user_loaded = False
870
871 # lookup by userid
872 if self.user_id is not None and self.user_id != anon_user.user_id:
873 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
874 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
875
876 # try to get user by api key
877 elif self._api_key and self._api_key != anon_user.api_key:
878 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
879 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
880
881 # lookup by username
882 elif self.username:
883 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
884 is_user_loaded = user_model.fill_data(self, username=self.username)
885 else:
886 log.debug('No data in %s that could have been used to log in' % self)
887
888 if not is_user_loaded:
889 log.debug('Failed to load user. Fallback to default user')
890 # if we cannot authenticate user try anonymous
891 if anon_user.active:
892 user_model.fill_data(self, user_id=anon_user.user_id)
893 # then we set this user as logged in
894 self.is_authenticated = True
895 else:
896 # in case of disabled anonymous user we reset some of the
897 # parameters so such user is "corrupted", skipping the fill_data
898 for attr in ['user_id', 'username', 'admin', 'active']:
899 setattr(self, attr, None)
900 self.is_authenticated = False
901
902 if not self.username:
903 self.username = 'None'
904
905 log.debug('Auth User is now %s' % self)
906
907 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
908 cache=False):
909 """
910 Fills user permission attribute with permissions taken from database;
911 works for permissions given for repositories, and for permissions that
912 are granted to groups
913
914 :param user: instance of User object from database
915 :param explicit: In case there are permissions both for user and a group
916 that user is part of, explicit flag will define if user will
917 explicitly override permissions from group, if it's False it will
918 make the decision based on the algo
919 :param algo: algorithm to decide which permission should be chosen if
920 multiple are defined, e.g. user in two different groups. It also
921 decides, if explicit flag is turned off, how to pick the permission
922 when user is in a group and also has a separate permission defined
923 """
924 user_id = user.user_id
925 user_is_admin = user.is_admin
926
927 # inheritance of global permissions like create repo/fork repo etc
928 user_inherit_default_permissions = user.inherit_default_permissions
929
930 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
931 compute = caches.conditional_cache(
932 'short_term', 'cache_desc',
933 condition=cache, func=_cached_perms_data)
934 result = compute(user_id, scope, user_is_admin,
935 user_inherit_default_permissions, explicit, algo)
936
937 result_repr = []
938 for k in result:
939 result_repr.append((k, len(result[k])))
940
941 log.debug('PERMISSION tree computed %s' % (result_repr,))
942 return result
943
944 @property
945 def is_default(self):
946 return self.username == User.DEFAULT_USER
947
948 @property
949 def is_admin(self):
950 return self.admin
951
952 @property
953 def is_user_object(self):
954 return self.user_id is not None
955
956 @property
957 def repositories_admin(self):
958 """
959 Returns list of repositories you're an admin of
960 """
961 return [
962 x[0] for x in self.permissions['repositories'].iteritems()
963 if x[1] == 'repository.admin']
964
965 @property
966 def repository_groups_admin(self):
967 """
968 Returns list of repository groups you're an admin of
969 """
970 return [
971 x[0] for x in self.permissions['repositories_groups'].iteritems()
972 if x[1] == 'group.admin']
973
974 @property
975 def user_groups_admin(self):
976 """
977 Returns list of user groups you're an admin of
978 """
979 return [
980 x[0] for x in self.permissions['user_groups'].iteritems()
981 if x[1] == 'usergroup.admin']
982
983 @property
984 def ip_allowed(self):
985 """
986 Checks if ip_addr used in constructor is allowed based on the defined
987 list of allowed IP addresses for the user
988
989 :returns: boolean, True if ip is in allowed ip range
990 """
991 # check IP
992 inherit = self.inherit_default_permissions
993 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
994 inherit_from_default=inherit)
995 @property
996 def personal_repo_group(self):
997 return RepoGroup.get_user_personal_repo_group(self.user_id)
998
999 @classmethod
1000 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1001 allowed_ips = AuthUser.get_allowed_ips(
1002 user_id, cache=True, inherit_from_default=inherit_from_default)
1003 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1004 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1005 return True
1006 else:
1007 log.info('Access for IP:%s forbidden, '
1008 'not in %s' % (ip_addr, allowed_ips))
1009 return False
1010
1011 def __repr__(self):
1012 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1013 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1014
1015 def set_authenticated(self, authenticated=True):
1016 if self.user_id != self.anonymous_user.user_id:
1017 self.is_authenticated = authenticated
1018
1019 def get_cookie_store(self):
1020 return {
1021 'username': self.username,
1022 'password': md5(self.password),
1023 'user_id': self.user_id,
1024 'is_authenticated': self.is_authenticated
1025 }
1026
1027 @classmethod
1028 def from_cookie_store(cls, cookie_store):
1029 """
1030 Creates AuthUser from a cookie store
1031
1032 :param cls:
1033 :param cookie_store:
1034 """
1035 user_id = cookie_store.get('user_id')
1036 username = cookie_store.get('username')
1037 api_key = cookie_store.get('api_key')
1038 return AuthUser(user_id, api_key, username)
1039
1040 @classmethod
1041 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1042 _set = set()
1043
1044 if inherit_from_default:
1045 default_ips = UserIpMap.query().filter(
1046 UserIpMap.user == User.get_default_user(cache=True))
1047 if cache:
1048 default_ips = default_ips.options(
1049 FromCache("sql_cache_short", "get_user_ips_default"))
1050
1051 # populate from default user
1052 for ip in default_ips:
1053 try:
1054 _set.add(ip.ip_addr)
1055 except ObjectDeletedError:
1056 # since we use heavy caching sometimes it happens that
1057 # we get deleted objects here, we just skip them
1058 pass
1059
1060 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1061 if cache:
1062 user_ips = user_ips.options(
1063 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1064
1065 for ip in user_ips:
1066 try:
1067 _set.add(ip.ip_addr)
1068 except ObjectDeletedError:
1069 # since we use heavy caching sometimes it happens that we get
1070 # deleted objects here, we just skip them
1071 pass
1072 return _set or set(['0.0.0.0/0', '::/0'])
1073
1074
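A brief sketch of the fallback at the end of `get_allowed_ips`: with no stored restrictions the allowed set degrades to the catch-all ranges, so any source address passes the range check. The stdlib `ipaddress` module is used here as an assumed stand-in for the `check_ip_access` helper.

# illustrative range check using stdlib ipaddress as a stand-in
import ipaddress

allowed_ips = set([u'0.0.0.0/0', u'::/0'])   # the fallback returned above
source_ip = ipaddress.ip_address(u'192.0.2.10')
assert any(source_ip in ipaddress.ip_network(net)
           for net in allowed_ips
           if ipaddress.ip_network(net).version == source_ip.version)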
1075 def set_available_permissions(config):
1076 """
1077 This function will populate pylons globals with all available permissions
1078 defined in the db. We don't want to check the db each time for new
1079 permissions since adding a new permission also requires an application
1080 restart, i.e. to decorate new views with the newly created permission
1081
1082 :param config: current pylons config instance
1083
1084 """
1085 log.info('getting information about all available permissions')
1086 try:
1087 sa = meta.Session
1088 all_perms = sa.query(Permission).all()
1089 config['available_permissions'] = [x.permission_name for x in all_perms]
1090 except Exception:
1091 log.error(traceback.format_exc())
1092 finally:
1093 meta.Session.remove()
1094
1095
1096 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1097 """
1098 Return the current CSRF protection token, creating one if it doesn't
1099 already exist and the save_if_missing flag is set.
1100
1101 :param session: pass in the pylons session, else we use the global ones
1102 :param force_new: force to re-generate the token and store it in session
1103 :param save_if_missing: save the newly generated token if it's missing in
1104 session
1105 """
1106 if not session:
1107 from pylons import session
1108
1109 if (csrf_token_key not in session and save_if_missing) or force_new:
1110 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1111 session[csrf_token_key] = token
1112 if hasattr(session, 'save'):
1113 session.save()
1114 return session.get(csrf_token_key)
1115
1116
1117 # CHECK DECORATORS
1118 class CSRFRequired(object):
1119 """
1120 Decorator for authenticating a form
1121
1122 This decorator uses an authorization token stored in the client's
1123 session for prevention of certain Cross-site request forgery (CSRF)
1124 attacks (See
1125 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1126 information).
1127
1128 For use with the ``webhelpers.secure_form`` helper functions.
1129
1130 """
1131 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1132 except_methods=None):
1133 self.token = token
1134 self.header = header
1135 self.except_methods = except_methods or []
1136
1137 def __call__(self, func):
1138 return get_cython_compat_decorator(self.__wrapper, func)
1139
1140 def _get_csrf(self, _request):
1141 return _request.POST.get(self.token, _request.headers.get(self.header))
1142
1143 def check_csrf(self, _request, cur_token):
1144 supplied_token = self._get_csrf(_request)
1145 return supplied_token and supplied_token == cur_token
1146
1147 def __wrapper(self, func, *fargs, **fkwargs):
1148 if request.method in self.except_methods:
1149 return func(*fargs, **fkwargs)
1150
1151 cur_token = get_csrf_token(save_if_missing=False)
1152 if self.check_csrf(request, cur_token):
1153 if request.POST.get(self.token):
1154 del request.POST[self.token]
1155 return func(*fargs, **fkwargs)
1156 else:
1157 reason = 'token-missing'
1158 supplied_token = self._get_csrf(request)
1159 if supplied_token and cur_token != supplied_token:
1160 reason = 'token-mismatch [%s:%s]' % ((cur_token or '')[:6],
1161 (supplied_token or '')[:6])
1162
1163 csrf_message = \
1164 ("Cross-site request forgery detected, request denied. See "
1165 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1166 "more information.")
1167 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1168 'REMOTE_ADDR:%s, HEADERS:%s' % (
1169 request, reason, request.remote_addr, request.headers))
1170
1171 raise HTTPForbidden(explanation=csrf_message)
1172
1173
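A hedged sketch of how a client satisfies the `CSRFRequired` check: the token travels either as the POST field named by `csrf_token_key` or in the `X-CSRF-Token` header. The URL, the token value, and the use of the `requests` library are placeholders/assumptions.

# illustrative client-side usage (assumptions noted above):
# import requests
#
# token = '...value rendered into the page via get_csrf_token()...'
# # as a form field (field name taken from csrf_token_key):
# requests.post('https://code.example.com/some/form',
#               data={'csrf_token': token, 'field': 'value'})
# # or as a header, e.g. for XHR-style calls:
# requests.post('https://code.example.com/some/xhr',
#               headers={'X-CSRF-Token': token})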
1174 class LoginRequired(object):
1175 """
1176 Must be logged in to execute this function else
1177 redirect to login page
1178
1179 :param auth_token_access: if enabled this checks only for a valid auth
1180 token and grants access based on that token
1181 """
1182 def __init__(self, auth_token_access=None):
1183 self.auth_token_access = auth_token_access
1184
1185 def __call__(self, func):
1186 return get_cython_compat_decorator(self.__wrapper, func)
1187
1188 def _get_request(self):
1189 from pyramid.threadlocal import get_current_request
1190 pyramid_request = get_current_request()
1191 if not pyramid_request:
1192 # return global request of pylons in case pyramid isn't available
1193 return request
1194 return pyramid_request
1195
1196 def __wrapper(self, func, *fargs, **fkwargs):
1197 from rhodecode.lib import helpers as h
1198 cls = fargs[0]
1199 user = cls._rhodecode_user
1200 request = self._get_request()
1201
1202 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1203 log.debug('Starting login restriction checks for user: %s' % (user,))
1204 # check if our IP is allowed
1205 ip_access_valid = True
1206 if not user.ip_allowed:
1207 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1208 category='warning')
1209 ip_access_valid = False
1210
1211 # check if we used an APIKEY and it's a valid one
1212 # defined white-list of controllers which API access will be enabled
1213 _auth_token = request.GET.get(
1214 'auth_token', '') or request.GET.get('api_key', '')
1215 auth_token_access_valid = allowed_auth_token_access(
1216 loc, auth_token=_auth_token)
1217
1218 # explicit controller is enabled or API is in our whitelist
1219 if self.auth_token_access or auth_token_access_valid:
1220 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1221 db_user = user.get_instance()
1222
1223 if db_user:
1224 if self.auth_token_access:
1225 roles = self.auth_token_access
1226 else:
1227 roles = [UserApiKeys.ROLE_HTTP]
1228 token_match = db_user.authenticate_by_token(
1229 _auth_token, roles=roles)
1230 else:
1231 log.debug('Unable to fetch db instance for auth user: %s', user)
1232 token_match = False
1233
1234 if _auth_token and token_match:
1235 auth_token_access_valid = True
1236 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1237 else:
1238 auth_token_access_valid = False
1239 if not _auth_token:
1240 log.debug("AUTH TOKEN *NOT* present in request")
1241 else:
1242 log.warning(
1243 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1244
1245 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1246 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1247 else 'AUTH_TOKEN_AUTH'
1248
1249 if ip_access_valid and (
1250 user.is_authenticated or auth_token_access_valid):
1251 log.info(
1252 'user %s authenticating with:%s IS authenticated on func %s'
1253 % (user, reason, loc))
1254
1255 # update user data to check last activity
1256 user.update_lastactivity()
1257 Session().commit()
1258 return func(*fargs, **fkwargs)
1259 else:
1260 log.warning(
1261 'user %s authenticating with:%s NOT authenticated on '
1262 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1263 % (user, reason, loc, ip_access_valid,
1264 auth_token_access_valid))
1265 # we preserve the get PARAM
1266 came_from = request.path_qs
1267 log.debug('redirecting to login page with %s' % (came_from,))
1268 return redirect(
1269 h.route_path('login', _query={'came_from': came_from}))
1270
1271
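A hedged sketch of how `LoginRequired` is typically applied to a controller method; the controller class is hypothetical, while `UserApiKeys.ROLE_HTTP` is the role the wrapper above falls back to.

# illustrative usage on a hypothetical controller:
#
# class MyFilesController(BaseRepoController):
#
#     @LoginRequired()   # plain session-based authentication
#     def index(self):
#         ...
#
#     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
#     def raw(self):     # also reachable with ?auth_token=<token>
#         ...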
1272 class NotAnonymous(object):
1272 class NotAnonymous(object):
1273 """
1273 """
1274 Must be logged in to execute this function else
1274 Must be logged in to execute this function else
1275 redirect to login page
1275 redirect to login page
1276 """
1276 """
1277
1277
1278 def __call__(self, func):
1278 def __call__(self, func):
1279 return get_cython_compat_decorator(self.__wrapper, func)
1279 return get_cython_compat_decorator(self.__wrapper, func)
1280
1280
1281 def __wrapper(self, func, *fargs, **fkwargs):
1281 def __wrapper(self, func, *fargs, **fkwargs):
1282 import rhodecode.lib.helpers as h
1282 import rhodecode.lib.helpers as h
1283 cls = fargs[0]
1283 cls = fargs[0]
1284 self.user = cls._rhodecode_user
1284 self.user = cls._rhodecode_user
1285
1285
1286 log.debug('Checking if user is not anonymous @%s' % cls)
1286 log.debug('Checking if user is not anonymous @%s' % cls)
1287
1287
1288 anonymous = self.user.username == User.DEFAULT_USER
1288 anonymous = self.user.username == User.DEFAULT_USER
1289
1289
1290 if anonymous:
1290 if anonymous:
1291 came_from = request.path_qs
1291 came_from = request.path_qs
1292 h.flash(_('You need to be a registered user to '
1292 h.flash(_('You need to be a registered user to '
1293 'perform this action'),
1293 'perform this action'),
1294 category='warning')
1294 category='warning')
1295 return redirect(
1295 return redirect(
1296 h.route_path('login', _query={'came_from': came_from}))
1296 h.route_path('login', _query={'came_from': came_from}))
1297 else:
1297 else:
1298 return func(*fargs, **fkwargs)
1298 return func(*fargs, **fkwargs)
1299
1299
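An illustrative usage sketch (not part of the diff): applying NotAnonymous to a controller action. The controller class and method names are invented, and it assumes a RhodeCode-style controller that exposes `_rhodecode_user` the way the base controllers do.

from rhodecode.lib.auth import NotAnonymous

class UserSettingsExampleController(object):  # hypothetical controller
    # assumed to be populated by the base controller machinery
    _rhodecode_user = None

    @NotAnonymous()
    def my_notifications(self):
        # anonymous (default) users get a warning flash and a redirect to
        # the login page before this body ever runs
        return 'notifications page'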
1300
1300
1301 class XHRRequired(object):
1301 class XHRRequired(object):
1302 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1302 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1303
1303
1304 def __call__(self, func):
1304 def __call__(self, func):
1305 return get_cython_compat_decorator(self.__wrapper, func)
1305 return get_cython_compat_decorator(self.__wrapper, func)
1306
1306
1307 def __wrapper(self, func, *fargs, **fkwargs):
1307 def __wrapper(self, func, *fargs, **fkwargs):
1308 log.debug('Checking if request is XMLHttpRequest (XHR)')
1308 log.debug('Checking if request is XMLHttpRequest (XHR)')
1309 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1309 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1310 if not request.is_xhr:
1310 if not request.is_xhr:
1311 abort(400, detail=xhr_message)
1311 abort(400, detail=xhr_message)
1312
1312
1313 return func(*fargs, **fkwargs)
1313 return func(*fargs, **fkwargs)
1314
1314
1315
1315
1316 class HasAcceptedRepoType(object):
1316 class HasAcceptedRepoType(object):
1317 """
1317 """
1318 Check if requested repo is within given repo type aliases
1318 Check if requested repo is within given repo type aliases
1319 """
1319 """
1320
1320
1321 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1321 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1322
1322
1323 def __init__(self, *repo_type_list):
1323 def __init__(self, *repo_type_list):
1324 self.repo_type_list = set(repo_type_list)
1324 self.repo_type_list = set(repo_type_list)
1325
1325
1326 def __call__(self, func):
1326 def __call__(self, func):
1327 return get_cython_compat_decorator(self.__wrapper, func)
1327 return get_cython_compat_decorator(self.__wrapper, func)
1328
1328
1329 def __wrapper(self, func, *fargs, **fkwargs):
1329 def __wrapper(self, func, *fargs, **fkwargs):
1330 import rhodecode.lib.helpers as h
1330 import rhodecode.lib.helpers as h
1331 cls = fargs[0]
1331 cls = fargs[0]
1332 rhodecode_repo = cls.rhodecode_repo
1332 rhodecode_repo = cls.rhodecode_repo
1333
1333
1334 log.debug('%s checking repo type for %s in %s',
1334 log.debug('%s checking repo type for %s in %s',
1335 self.__class__.__name__,
1335 self.__class__.__name__,
1336 rhodecode_repo.alias, self.repo_type_list)
1336 rhodecode_repo.alias, self.repo_type_list)
1337
1337
1338 if rhodecode_repo.alias in self.repo_type_list:
1338 if rhodecode_repo.alias in self.repo_type_list:
1339 return func(*fargs, **fkwargs)
1339 return func(*fargs, **fkwargs)
1340 else:
1340 else:
1341 h.flash(h.literal(
1341 h.flash(h.literal(
1342 _('Action not supported for %s.' % rhodecode_repo.alias)),
1342 _('Action not supported for %s.' % rhodecode_repo.alias)),
1343 category='warning')
1343 category='warning')
1344 return redirect(
1344 return redirect(
1345 url('summary_home', repo_name=cls.rhodecode_db_repo.repo_name))
1345 h.route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
1346
1346
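A short sketch of how the repo-type guard above can be stacked on a controller action (not part of the diff). The controller and method are hypothetical; 'hg' and 'git' are the usual backend aliases.

from rhodecode.lib.auth import HasAcceptedRepoType

class RepoExampleController(object):  # hypothetical controller
    rhodecode_repo = None      # vcs repo instance, set by the base controller
    rhodecode_db_repo = None   # db repo instance, set by the base controller

    @HasAcceptedRepoType('hg', 'git')
    def compare_example(self):
        # only reachable for Mercurial and Git repositories; other types are
        # flashed a warning and redirected to the summary page
        return 'compare view'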
1347
1347
1348 class PermsDecorator(object):
1348 class PermsDecorator(object):
1349 """
1349 """
1350 Base class for controller decorators, we extract the current user from
1350 Base class for controller decorators, we extract the current user from
1351 the class itself, which has it stored in base controllers
1351 the class itself, which has it stored in base controllers
1352 """
1352 """
1353
1353
1354 def __init__(self, *required_perms):
1354 def __init__(self, *required_perms):
1355 self.required_perms = set(required_perms)
1355 self.required_perms = set(required_perms)
1356
1356
1357 def __call__(self, func):
1357 def __call__(self, func):
1358 return get_cython_compat_decorator(self.__wrapper, func)
1358 return get_cython_compat_decorator(self.__wrapper, func)
1359
1359
1360 def _get_request(self):
1360 def _get_request(self):
1361 from pyramid.threadlocal import get_current_request
1361 from pyramid.threadlocal import get_current_request
1362 pyramid_request = get_current_request()
1362 pyramid_request = get_current_request()
1363 if not pyramid_request:
1363 if not pyramid_request:
1364 # return global request of pylons in case pyramid isn't available
1364 # return global request of pylons in case pyramid isn't available
1365 return request
1365 return request
1366 return pyramid_request
1366 return pyramid_request
1367
1367
1368 def _get_came_from(self):
1368 def _get_came_from(self):
1369 _request = self._get_request()
1369 _request = self._get_request()
1370
1370
1371 # both pylons/pyramid has this attribute
1371 # both pylons/pyramid has this attribute
1372 return _request.path_qs
1372 return _request.path_qs
1373
1373
1374 def __wrapper(self, func, *fargs, **fkwargs):
1374 def __wrapper(self, func, *fargs, **fkwargs):
1375 import rhodecode.lib.helpers as h
1375 import rhodecode.lib.helpers as h
1376 cls = fargs[0]
1376 cls = fargs[0]
1377 _user = cls._rhodecode_user
1377 _user = cls._rhodecode_user
1378
1378
1379 log.debug('checking %s permissions %s for %s %s',
1379 log.debug('checking %s permissions %s for %s %s',
1380 self.__class__.__name__, self.required_perms, cls, _user)
1380 self.__class__.__name__, self.required_perms, cls, _user)
1381
1381
1382 if self.check_permissions(_user):
1382 if self.check_permissions(_user):
1383 log.debug('Permission granted for %s %s', cls, _user)
1383 log.debug('Permission granted for %s %s', cls, _user)
1384 return func(*fargs, **fkwargs)
1384 return func(*fargs, **fkwargs)
1385
1385
1386 else:
1386 else:
1387 log.debug('Permission denied for %s %s', cls, _user)
1387 log.debug('Permission denied for %s %s', cls, _user)
1388 anonymous = _user.username == User.DEFAULT_USER
1388 anonymous = _user.username == User.DEFAULT_USER
1389
1389
1390 if anonymous:
1390 if anonymous:
1391 came_from = self._get_came_from()
1391 came_from = self._get_came_from()
1392 h.flash(_('You need to be signed in to view this page'),
1392 h.flash(_('You need to be signed in to view this page'),
1393 category='warning')
1393 category='warning')
1394 raise HTTPFound(
1394 raise HTTPFound(
1395 h.route_path('login', _query={'came_from': came_from}))
1395 h.route_path('login', _query={'came_from': came_from}))
1396
1396
1397 else:
1397 else:
1398 # redirect with forbidden ret code
1398 # redirect with forbidden ret code
1399 raise HTTPForbidden()
1399 raise HTTPForbidden()
1400
1400
1401 def check_permissions(self, user):
1401 def check_permissions(self, user):
1402 """Dummy function for overriding"""
1402 """Dummy function for overriding"""
1403 raise NotImplementedError(
1403 raise NotImplementedError(
1404 'You have to write this function in child class')
1404 'You have to write this function in child class')
1405
1405
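The concrete decorators that follow only differ in check_permissions(); the base class handles the login redirect for anonymous users and HTTPForbidden for everyone else. A hypothetical subclass would look like this (class name and behaviour are illustrative, mirroring the subset check used below):

from rhodecode.lib.auth import PermsDecorator

class HasExamplePermissionAllDecorator(PermsDecorator):  # hypothetical
    """Grants access only when every required permission is present."""

    def check_permissions(self, user):
        perms = user.permissions_with_scope({})
        return self.required_perms.issubset(perms['global'])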
1406
1406
1407 class HasPermissionAllDecorator(PermsDecorator):
1407 class HasPermissionAllDecorator(PermsDecorator):
1408 """
1408 """
1409 Checks for access permission for all given predicates. All of them
1409 Checks for access permission for all given predicates. All of them
1410 have to be met in order to fulfill the request
1410 have to be met in order to fulfill the request
1411 """
1411 """
1412
1412
1413 def check_permissions(self, user):
1413 def check_permissions(self, user):
1414 perms = user.permissions_with_scope({})
1414 perms = user.permissions_with_scope({})
1415 if self.required_perms.issubset(perms['global']):
1415 if self.required_perms.issubset(perms['global']):
1416 return True
1416 return True
1417 return False
1417 return False
1418
1418
1419
1419
1420 class HasPermissionAnyDecorator(PermsDecorator):
1420 class HasPermissionAnyDecorator(PermsDecorator):
1421 """
1421 """
1422 Checks for access permission for any of given predicates. In order to
1422 Checks for access permission for any of given predicates. In order to
1423 fulfill the request any of the predicates must be met
1423 fulfill the request any of the predicates must be met
1424 """
1424 """
1425
1425
1426 def check_permissions(self, user):
1426 def check_permissions(self, user):
1427 perms = user.permissions_with_scope({})
1427 perms = user.permissions_with_scope({})
1428 if self.required_perms.intersection(perms['global']):
1428 if self.required_perms.intersection(perms['global']):
1429 return True
1429 return True
1430 return False
1430 return False
1431
1431
1432
1432
1433 class HasRepoPermissionAllDecorator(PermsDecorator):
1433 class HasRepoPermissionAllDecorator(PermsDecorator):
1434 """
1434 """
1435 Checks for access permission for all given predicates for specific
1435 Checks for access permission for all given predicates for specific
1436 repository. All of them have to be met in order to fulfill the request
1436 repository. All of them have to be met in order to fulfill the request
1437 """
1437 """
1438 def _get_repo_name(self):
1438 def _get_repo_name(self):
1439 _request = self._get_request()
1439 _request = self._get_request()
1440 return get_repo_slug(_request)
1440 return get_repo_slug(_request)
1441
1441
1442 def check_permissions(self, user):
1442 def check_permissions(self, user):
1443 perms = user.permissions
1443 perms = user.permissions
1444 repo_name = self._get_repo_name()
1444 repo_name = self._get_repo_name()
1445
1445
1446 try:
1446 try:
1447 user_perms = set([perms['repositories'][repo_name]])
1447 user_perms = set([perms['repositories'][repo_name]])
1448 except KeyError:
1448 except KeyError:
1449 log.debug('cannot locate repo with name: `%s` in permissions defs',
1449 log.debug('cannot locate repo with name: `%s` in permissions defs',
1450 repo_name)
1450 repo_name)
1451 return False
1451 return False
1452
1452
1453 log.debug('checking `%s` permissions for repo `%s`',
1453 log.debug('checking `%s` permissions for repo `%s`',
1454 user_perms, repo_name)
1454 user_perms, repo_name)
1455 if self.required_perms.issubset(user_perms):
1455 if self.required_perms.issubset(user_perms):
1456 return True
1456 return True
1457 return False
1457 return False
1458
1458
1459
1459
1460 class HasRepoPermissionAnyDecorator(PermsDecorator):
1460 class HasRepoPermissionAnyDecorator(PermsDecorator):
1461 """
1461 """
1462 Checks for access permission for any of given predicates for specific
1462 Checks for access permission for any of given predicates for specific
1463 repository. In order to fulfill the request any of the predicates must be met
1463 repository. In order to fulfill the request any of the predicates must be met
1464 """
1464 """
1465 def _get_repo_name(self):
1465 def _get_repo_name(self):
1466 _request = self._get_request()
1466 _request = self._get_request()
1467 return get_repo_slug(_request)
1467 return get_repo_slug(_request)
1468
1468
1469 def check_permissions(self, user):
1469 def check_permissions(self, user):
1470 perms = user.permissions
1470 perms = user.permissions
1471 repo_name = self._get_repo_name()
1471 repo_name = self._get_repo_name()
1472
1472
1473 try:
1473 try:
1474 user_perms = set([perms['repositories'][repo_name]])
1474 user_perms = set([perms['repositories'][repo_name]])
1475 except KeyError:
1475 except KeyError:
1476 log.debug('cannot locate repo with name: `%s` in permissions defs',
1476 log.debug('cannot locate repo with name: `%s` in permissions defs',
1477 repo_name)
1477 repo_name)
1478 return False
1478 return False
1479
1479
1480 log.debug('checking `%s` permissions for repo `%s`',
1480 log.debug('checking `%s` permissions for repo `%s`',
1481 user_perms, repo_name)
1481 user_perms, repo_name)
1482 if self.required_perms.intersection(user_perms):
1482 if self.required_perms.intersection(user_perms):
1483 return True
1483 return True
1484 return False
1484 return False
1485
1485
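A usage sketch for the repository-scoped decorator above (not part of the diff). The view class and method are hypothetical; the permission keys are the standard repository permissions.

from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator

class RepoFilesExampleView(object):  # hypothetical view/controller

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_example(self):
        # reached only when the requested repo grants at least read access
        return 'files view'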
1486
1486
1487 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1487 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1488 """
1488 """
1489 Checks for access permission for all given predicates for specific
1489 Checks for access permission for all given predicates for specific
1490 repository group. All of them have to be met in order to
1490 repository group. All of them have to be met in order to
1491 fulfill the request
1491 fulfill the request
1492 """
1492 """
1493 def _get_repo_group_name(self):
1493 def _get_repo_group_name(self):
1494 _request = self._get_request()
1494 _request = self._get_request()
1495 return get_repo_group_slug(_request)
1495 return get_repo_group_slug(_request)
1496
1496
1497 def check_permissions(self, user):
1497 def check_permissions(self, user):
1498 perms = user.permissions
1498 perms = user.permissions
1499 group_name = self._get_repo_group_name()
1499 group_name = self._get_repo_group_name()
1500 try:
1500 try:
1501 user_perms = set([perms['repositories_groups'][group_name]])
1501 user_perms = set([perms['repositories_groups'][group_name]])
1502 except KeyError:
1502 except KeyError:
1503 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1503 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1504 group_name)
1504 group_name)
1505 return False
1505 return False
1506
1506
1507 log.debug('checking `%s` permissions for repo group `%s`',
1507 log.debug('checking `%s` permissions for repo group `%s`',
1508 user_perms, group_name)
1508 user_perms, group_name)
1509 if self.required_perms.issubset(user_perms):
1509 if self.required_perms.issubset(user_perms):
1510 return True
1510 return True
1511 return False
1511 return False
1512
1512
1513
1513
1514 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1514 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1515 """
1515 """
1516 Checks for access permission for any of given predicates for specific
1516 Checks for access permission for any of given predicates for specific
1517 repository group. In order to fulfill the request any
1517 repository group. In order to fulfill the request any
1518 of the predicates must be met
1518 of the predicates must be met
1519 """
1519 """
1520 def _get_repo_group_name(self):
1520 def _get_repo_group_name(self):
1521 _request = self._get_request()
1521 _request = self._get_request()
1522 return get_repo_group_slug(_request)
1522 return get_repo_group_slug(_request)
1523
1523
1524 def check_permissions(self, user):
1524 def check_permissions(self, user):
1525 perms = user.permissions
1525 perms = user.permissions
1526 group_name = self._get_repo_group_name()
1526 group_name = self._get_repo_group_name()
1527
1527
1528 try:
1528 try:
1529 user_perms = set([perms['repositories_groups'][group_name]])
1529 user_perms = set([perms['repositories_groups'][group_name]])
1530 except KeyError:
1530 except KeyError:
1531 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1531 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1532 group_name)
1532 group_name)
1533 return False
1533 return False
1534
1534
1535 log.debug('checking `%s` permissions for repo group `%s`',
1535 log.debug('checking `%s` permissions for repo group `%s`',
1536 user_perms, group_name)
1536 user_perms, group_name)
1537 if self.required_perms.intersection(user_perms):
1537 if self.required_perms.intersection(user_perms):
1538 return True
1538 return True
1539 return False
1539 return False
1540
1540
1541
1541
1542 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1542 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1543 """
1543 """
1544 Checks for access permission for all given predicates for specific
1544 Checks for access permission for all given predicates for specific
1545 user group. All of them have to be met in order to fulfill the request
1545 user group. All of them have to be met in order to fulfill the request
1546 """
1546 """
1547 def _get_user_group_name(self):
1547 def _get_user_group_name(self):
1548 _request = self._get_request()
1548 _request = self._get_request()
1549 return get_user_group_slug(_request)
1549 return get_user_group_slug(_request)
1550
1550
1551 def check_permissions(self, user):
1551 def check_permissions(self, user):
1552 perms = user.permissions
1552 perms = user.permissions
1553 group_name = self._get_user_group_name()
1553 group_name = self._get_user_group_name()
1554 try:
1554 try:
1555 user_perms = set([perms['user_groups'][group_name]])
1555 user_perms = set([perms['user_groups'][group_name]])
1556 except KeyError:
1556 except KeyError:
1557 return False
1557 return False
1558
1558
1559 if self.required_perms.issubset(user_perms):
1559 if self.required_perms.issubset(user_perms):
1560 return True
1560 return True
1561 return False
1561 return False
1562
1562
1563
1563
1564 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1564 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1565 """
1565 """
1566 Checks for access permission for any of given predicates for specific
1566 Checks for access permission for any of given predicates for specific
1567 user group. In order to fulfill the request any of the predicates must be met
1567 user group. In order to fulfill the request any of the predicates must be met
1568 """
1568 """
1569 def _get_user_group_name(self):
1569 def _get_user_group_name(self):
1570 _request = self._get_request()
1570 _request = self._get_request()
1571 return get_user_group_slug(_request)
1571 return get_user_group_slug(_request)
1572
1572
1573 def check_permissions(self, user):
1573 def check_permissions(self, user):
1574 perms = user.permissions
1574 perms = user.permissions
1575 group_name = self._get_user_group_name()
1575 group_name = self._get_user_group_name()
1576 try:
1576 try:
1577 user_perms = set([perms['user_groups'][group_name]])
1577 user_perms = set([perms['user_groups'][group_name]])
1578 except KeyError:
1578 except KeyError:
1579 return False
1579 return False
1580
1580
1581 if self.required_perms.intersection(user_perms):
1581 if self.required_perms.intersection(user_perms):
1582 return True
1582 return True
1583 return False
1583 return False
1584
1584
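The difference between the *All and *Any variants above boils down to issubset() versus intersection() on plain sets. A runnable illustration, using example permission strings:

required = set(['repository.read', 'repository.write'])
granted = set(['repository.read'])

print(required.issubset(granted))            # "All" decorators: False
print(bool(required.intersection(granted)))  # "Any" decorators: True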
1585
1585
1586 # CHECK FUNCTIONS
1586 # CHECK FUNCTIONS
1587 class PermsFunction(object):
1587 class PermsFunction(object):
1588 """Base function for other check functions"""
1588 """Base function for other check functions"""
1589
1589
1590 def __init__(self, *perms):
1590 def __init__(self, *perms):
1591 self.required_perms = set(perms)
1591 self.required_perms = set(perms)
1592 self.repo_name = None
1592 self.repo_name = None
1593 self.repo_group_name = None
1593 self.repo_group_name = None
1594 self.user_group_name = None
1594 self.user_group_name = None
1595
1595
1596 def __bool__(self):
1596 def __bool__(self):
1597 frame = inspect.currentframe()
1597 frame = inspect.currentframe()
1598 stack_trace = traceback.format_stack(frame)
1598 stack_trace = traceback.format_stack(frame)
1599 log.error('Checking bool value on a class instance of perm '
1599 log.error('Checking bool value on a class instance of perm '
1600 'function is not allowed: %s' % ''.join(stack_trace))
1600 'function is not allowed: %s' % ''.join(stack_trace))
1601 # rather than throwing errors, we always return False here, so if
1601 # rather than throwing errors, we always return False here, so if
1602 # someone accidentally checks the truth value of a bare instance it
1602 # someone accidentally checks the truth value of a bare instance it
1603 # will always end up returning False
1603 # will always end up returning False
1604 return False
1604 return False
1605 __nonzero__ = __bool__
1605 __nonzero__ = __bool__
1606
1606
1607 def __call__(self, check_location='', user=None):
1607 def __call__(self, check_location='', user=None):
1608 if not user:
1608 if not user:
1609 log.debug('Using user attribute from global request')
1609 log.debug('Using user attribute from global request')
1610 # TODO: remove this someday, put user as an attribute here
1610 # TODO: remove this someday, put user as an attribute here
1611 request = self._get_request()
1611 request = self._get_request()
1612 user = request.user
1612 user = request.user
1613
1613
1614 # init auth user if not already given
1614 # init auth user if not already given
1615 if not isinstance(user, AuthUser):
1615 if not isinstance(user, AuthUser):
1616 log.debug('Wrapping user %s into AuthUser', user)
1616 log.debug('Wrapping user %s into AuthUser', user)
1617 user = AuthUser(user.user_id)
1617 user = AuthUser(user.user_id)
1618
1618
1619 cls_name = self.__class__.__name__
1619 cls_name = self.__class__.__name__
1620 check_scope = self._get_check_scope(cls_name)
1620 check_scope = self._get_check_scope(cls_name)
1621 check_location = check_location or 'unspecified location'
1621 check_location = check_location or 'unspecified location'
1622
1622
1623 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1623 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1624 self.required_perms, user, check_scope, check_location)
1624 self.required_perms, user, check_scope, check_location)
1625 if not user:
1625 if not user:
1626 log.warning('Empty user given for permission check')
1626 log.warning('Empty user given for permission check')
1627 return False
1627 return False
1628
1628
1629 if self.check_permissions(user):
1629 if self.check_permissions(user):
1630 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1630 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1631 check_scope, user, check_location)
1631 check_scope, user, check_location)
1632 return True
1632 return True
1633
1633
1634 else:
1634 else:
1635 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1635 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1636 check_scope, user, check_location)
1636 check_scope, user, check_location)
1637 return False
1637 return False
1638
1638
1639 def _get_request(self):
1639 def _get_request(self):
1640 from pyramid.threadlocal import get_current_request
1640 from pyramid.threadlocal import get_current_request
1641 pyramid_request = get_current_request()
1641 pyramid_request = get_current_request()
1642 if not pyramid_request:
1642 if not pyramid_request:
1643 # return global request of pylons in case the pyramid one isn't available
1643 # return global request of pylons in case the pyramid one isn't available
1644 return request
1644 return request
1645 return pyramid_request
1645 return pyramid_request
1646
1646
1647 def _get_check_scope(self, cls_name):
1647 def _get_check_scope(self, cls_name):
1648 return {
1648 return {
1649 'HasPermissionAll': 'GLOBAL',
1649 'HasPermissionAll': 'GLOBAL',
1650 'HasPermissionAny': 'GLOBAL',
1650 'HasPermissionAny': 'GLOBAL',
1651 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1651 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1652 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1652 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1653 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1653 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1654 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1654 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1655 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1655 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1656 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1656 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1657 }.get(cls_name, '?:%s' % cls_name)
1657 }.get(cls_name, '?:%s' % cls_name)
1658
1658
1659 def check_permissions(self, user):
1659 def check_permissions(self, user):
1660 """Dummy function for overriding"""
1660 """Dummy function for overriding"""
1661 raise Exception('You have to write this function in child class')
1661 raise Exception('You have to write this function in child class')
1662
1662
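Because of the __bool__ guard above, the check functions below must be *called*; truth-testing a bare instance always yields False. A minimal sketch, assuming a controller or template context where `user` is an AuthUser and the repo name is known:

from rhodecode.lib.auth import HasRepoPermissionAny

can_read = HasRepoPermissionAny(
    'repository.read', 'repository.write', 'repository.admin')(
        repo_name='some-repo', check_location='example-check', user=user)

# WRONG: never calls check_permissions() and always evaluates to False
# if HasRepoPermissionAny('repository.read'):
#     ...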
1663
1663
1664 class HasPermissionAll(PermsFunction):
1664 class HasPermissionAll(PermsFunction):
1665 def check_permissions(self, user):
1665 def check_permissions(self, user):
1666 perms = user.permissions_with_scope({})
1666 perms = user.permissions_with_scope({})
1667 if self.required_perms.issubset(perms.get('global')):
1667 if self.required_perms.issubset(perms.get('global')):
1668 return True
1668 return True
1669 return False
1669 return False
1670
1670
1671
1671
1672 class HasPermissionAny(PermsFunction):
1672 class HasPermissionAny(PermsFunction):
1673 def check_permissions(self, user):
1673 def check_permissions(self, user):
1674 perms = user.permissions_with_scope({})
1674 perms = user.permissions_with_scope({})
1675 if self.required_perms.intersection(perms.get('global')):
1675 if self.required_perms.intersection(perms.get('global')):
1676 return True
1676 return True
1677 return False
1677 return False
1678
1678
1679
1679
1680 class HasRepoPermissionAll(PermsFunction):
1680 class HasRepoPermissionAll(PermsFunction):
1681 def __call__(self, repo_name=None, check_location='', user=None):
1681 def __call__(self, repo_name=None, check_location='', user=None):
1682 self.repo_name = repo_name
1682 self.repo_name = repo_name
1683 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1683 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1684
1684
1685 def _get_repo_name(self):
1685 def _get_repo_name(self):
1686 if not self.repo_name:
1686 if not self.repo_name:
1687 _request = self._get_request()
1687 _request = self._get_request()
1688 self.repo_name = get_repo_slug(_request)
1688 self.repo_name = get_repo_slug(_request)
1689 return self.repo_name
1689 return self.repo_name
1690
1690
1691 def check_permissions(self, user):
1691 def check_permissions(self, user):
1692 self.repo_name = self._get_repo_name()
1692 self.repo_name = self._get_repo_name()
1693 perms = user.permissions
1693 perms = user.permissions
1694 try:
1694 try:
1695 user_perms = set([perms['repositories'][self.repo_name]])
1695 user_perms = set([perms['repositories'][self.repo_name]])
1696 except KeyError:
1696 except KeyError:
1697 return False
1697 return False
1698 if self.required_perms.issubset(user_perms):
1698 if self.required_perms.issubset(user_perms):
1699 return True
1699 return True
1700 return False
1700 return False
1701
1701
1702
1702
1703 class HasRepoPermissionAny(PermsFunction):
1703 class HasRepoPermissionAny(PermsFunction):
1704 def __call__(self, repo_name=None, check_location='', user=None):
1704 def __call__(self, repo_name=None, check_location='', user=None):
1705 self.repo_name = repo_name
1705 self.repo_name = repo_name
1706 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1706 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1707
1707
1708 def _get_repo_name(self):
1708 def _get_repo_name(self):
1709 if not self.repo_name:
1709 if not self.repo_name:
1710 self.repo_name = get_repo_slug(request)
1710 self.repo_name = get_repo_slug(request)
1711 return self.repo_name
1711 return self.repo_name
1712
1712
1713 def check_permissions(self, user):
1713 def check_permissions(self, user):
1714 self.repo_name = self._get_repo_name()
1714 self.repo_name = self._get_repo_name()
1715 perms = user.permissions
1715 perms = user.permissions
1716 try:
1716 try:
1717 user_perms = set([perms['repositories'][self.repo_name]])
1717 user_perms = set([perms['repositories'][self.repo_name]])
1718 except KeyError:
1718 except KeyError:
1719 return False
1719 return False
1720 if self.required_perms.intersection(user_perms):
1720 if self.required_perms.intersection(user_perms):
1721 return True
1721 return True
1722 return False
1722 return False
1723
1723
1724
1724
1725 class HasRepoGroupPermissionAny(PermsFunction):
1725 class HasRepoGroupPermissionAny(PermsFunction):
1726 def __call__(self, group_name=None, check_location='', user=None):
1726 def __call__(self, group_name=None, check_location='', user=None):
1727 self.repo_group_name = group_name
1727 self.repo_group_name = group_name
1728 return super(HasRepoGroupPermissionAny, self).__call__(
1728 return super(HasRepoGroupPermissionAny, self).__call__(
1729 check_location, user)
1729 check_location, user)
1730
1730
1731 def check_permissions(self, user):
1731 def check_permissions(self, user):
1732 perms = user.permissions
1732 perms = user.permissions
1733 try:
1733 try:
1734 user_perms = set(
1734 user_perms = set(
1735 [perms['repositories_groups'][self.repo_group_name]])
1735 [perms['repositories_groups'][self.repo_group_name]])
1736 except KeyError:
1736 except KeyError:
1737 return False
1737 return False
1738 if self.required_perms.intersection(user_perms):
1738 if self.required_perms.intersection(user_perms):
1739 return True
1739 return True
1740 return False
1740 return False
1741
1741
1742
1742
1743 class HasRepoGroupPermissionAll(PermsFunction):
1743 class HasRepoGroupPermissionAll(PermsFunction):
1744 def __call__(self, group_name=None, check_location='', user=None):
1744 def __call__(self, group_name=None, check_location='', user=None):
1745 self.repo_group_name = group_name
1745 self.repo_group_name = group_name
1746 return super(HasRepoGroupPermissionAll, self).__call__(
1746 return super(HasRepoGroupPermissionAll, self).__call__(
1747 check_location, user)
1747 check_location, user)
1748
1748
1749 def check_permissions(self, user):
1749 def check_permissions(self, user):
1750 perms = user.permissions
1750 perms = user.permissions
1751 try:
1751 try:
1752 user_perms = set(
1752 user_perms = set(
1753 [perms['repositories_groups'][self.repo_group_name]])
1753 [perms['repositories_groups'][self.repo_group_name]])
1754 except KeyError:
1754 except KeyError:
1755 return False
1755 return False
1756 if self.required_perms.issubset(user_perms):
1756 if self.required_perms.issubset(user_perms):
1757 return True
1757 return True
1758 return False
1758 return False
1759
1759
1760
1760
1761 class HasUserGroupPermissionAny(PermsFunction):
1761 class HasUserGroupPermissionAny(PermsFunction):
1762 def __call__(self, user_group_name=None, check_location='', user=None):
1762 def __call__(self, user_group_name=None, check_location='', user=None):
1763 self.user_group_name = user_group_name
1763 self.user_group_name = user_group_name
1764 return super(HasUserGroupPermissionAny, self).__call__(
1764 return super(HasUserGroupPermissionAny, self).__call__(
1765 check_location, user)
1765 check_location, user)
1766
1766
1767 def check_permissions(self, user):
1767 def check_permissions(self, user):
1768 perms = user.permissions
1768 perms = user.permissions
1769 try:
1769 try:
1770 user_perms = set([perms['user_groups'][self.user_group_name]])
1770 user_perms = set([perms['user_groups'][self.user_group_name]])
1771 except KeyError:
1771 except KeyError:
1772 return False
1772 return False
1773 if self.required_perms.intersection(user_perms):
1773 if self.required_perms.intersection(user_perms):
1774 return True
1774 return True
1775 return False
1775 return False
1776
1776
1777
1777
1778 class HasUserGroupPermissionAll(PermsFunction):
1778 class HasUserGroupPermissionAll(PermsFunction):
1779 def __call__(self, user_group_name=None, check_location='', user=None):
1779 def __call__(self, user_group_name=None, check_location='', user=None):
1780 self.user_group_name = user_group_name
1780 self.user_group_name = user_group_name
1781 return super(HasUserGroupPermissionAll, self).__call__(
1781 return super(HasUserGroupPermissionAll, self).__call__(
1782 check_location, user)
1782 check_location, user)
1783
1783
1784 def check_permissions(self, user):
1784 def check_permissions(self, user):
1785 perms = user.permissions
1785 perms = user.permissions
1786 try:
1786 try:
1787 user_perms = set([perms['user_groups'][self.user_group_name]])
1787 user_perms = set([perms['user_groups'][self.user_group_name]])
1788 except KeyError:
1788 except KeyError:
1789 return False
1789 return False
1790 if self.required_perms.issubset(user_perms):
1790 if self.required_perms.issubset(user_perms):
1791 return True
1791 return True
1792 return False
1792 return False
1793
1793
1794
1794
1795 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1795 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1796 class HasPermissionAnyMiddleware(object):
1796 class HasPermissionAnyMiddleware(object):
1797 def __init__(self, *perms):
1797 def __init__(self, *perms):
1798 self.required_perms = set(perms)
1798 self.required_perms = set(perms)
1799
1799
1800 def __call__(self, user, repo_name):
1800 def __call__(self, user, repo_name):
1801 # repo_name MUST be unicode, since we handle keys in permission
1801 # repo_name MUST be unicode, since we handle keys in permission
1802 # dict as unicode
1802 # dict as unicode
1803 repo_name = safe_unicode(repo_name)
1803 repo_name = safe_unicode(repo_name)
1804 user = AuthUser(user.user_id)
1804 user = AuthUser(user.user_id)
1805 log.debug(
1805 log.debug(
1806 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1806 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1807 self.required_perms, user, repo_name)
1807 self.required_perms, user, repo_name)
1808
1808
1809 if self.check_permissions(user, repo_name):
1809 if self.check_permissions(user, repo_name):
1810 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1810 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1811 repo_name, user, 'PermissionMiddleware')
1811 repo_name, user, 'PermissionMiddleware')
1812 return True
1812 return True
1813
1813
1814 else:
1814 else:
1815 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1815 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1816 repo_name, user, 'PermissionMiddleware')
1816 repo_name, user, 'PermissionMiddleware')
1817 return False
1817 return False
1818
1818
1819 def check_permissions(self, user, repo_name):
1819 def check_permissions(self, user, repo_name):
1820 perms = user.permissions_with_scope({'repo_name': repo_name})
1820 perms = user.permissions_with_scope({'repo_name': repo_name})
1821
1821
1822 try:
1822 try:
1823 user_perms = set([perms['repositories'][repo_name]])
1823 user_perms = set([perms['repositories'][repo_name]])
1824 except Exception:
1824 except Exception:
1825 log.exception('Error while accessing user permissions')
1825 log.exception('Error while accessing user permissions')
1826 return False
1826 return False
1827
1827
1828 if self.required_perms.intersection(user_perms):
1828 if self.required_perms.intersection(user_perms):
1829 return True
1829 return True
1830 return False
1830 return False
1831
1831
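A sketch of how the middleware variant below is driven (not part of the diff). Unlike the decorators, it is handed the user and repository name explicitly; `user` and the repo name are assumed example values.

from rhodecode.lib.auth import HasPermissionAnyMiddleware

vcs_perm_check = HasPermissionAnyMiddleware(
    'repository.read', 'repository.write', 'repository.admin')
allowed = vcs_perm_check(user, u'some-group/some-repo')  # True or False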
1832
1832
1833 # SPECIAL VERSION TO HANDLE API AUTH
1833 # SPECIAL VERSION TO HANDLE API AUTH
1834 class _BaseApiPerm(object):
1834 class _BaseApiPerm(object):
1835 def __init__(self, *perms):
1835 def __init__(self, *perms):
1836 self.required_perms = set(perms)
1836 self.required_perms = set(perms)
1837
1837
1838 def __call__(self, check_location=None, user=None, repo_name=None,
1838 def __call__(self, check_location=None, user=None, repo_name=None,
1839 group_name=None, user_group_name=None):
1839 group_name=None, user_group_name=None):
1840 cls_name = self.__class__.__name__
1840 cls_name = self.__class__.__name__
1841 check_scope = 'global:%s' % (self.required_perms,)
1841 check_scope = 'global:%s' % (self.required_perms,)
1842 if repo_name:
1842 if repo_name:
1843 check_scope += ', repo_name:%s' % (repo_name,)
1843 check_scope += ', repo_name:%s' % (repo_name,)
1844
1844
1845 if group_name:
1845 if group_name:
1846 check_scope += ', repo_group_name:%s' % (group_name,)
1846 check_scope += ', repo_group_name:%s' % (group_name,)
1847
1847
1848 if user_group_name:
1848 if user_group_name:
1849 check_scope += ', user_group_name:%s' % (user_group_name,)
1849 check_scope += ', user_group_name:%s' % (user_group_name,)
1850
1850
1851 log.debug(
1851 log.debug(
1852 'checking cls:%s %s %s @ %s'
1852 'checking cls:%s %s %s @ %s'
1853 % (cls_name, self.required_perms, check_scope, check_location))
1853 % (cls_name, self.required_perms, check_scope, check_location))
1854 if not user:
1854 if not user:
1855 log.debug('Empty User passed into arguments')
1855 log.debug('Empty User passed into arguments')
1856 return False
1856 return False
1857
1857
1858 # process user
1858 # process user
1859 if not isinstance(user, AuthUser):
1859 if not isinstance(user, AuthUser):
1860 user = AuthUser(user.user_id)
1860 user = AuthUser(user.user_id)
1861 if not check_location:
1861 if not check_location:
1862 check_location = 'unspecified'
1862 check_location = 'unspecified'
1863 if self.check_permissions(user.permissions, repo_name, group_name,
1863 if self.check_permissions(user.permissions, repo_name, group_name,
1864 user_group_name):
1864 user_group_name):
1865 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1865 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1866 check_scope, user, check_location)
1866 check_scope, user, check_location)
1867 return True
1867 return True
1868
1868
1869 else:
1869 else:
1870 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1870 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1871 check_scope, user, check_location)
1871 check_scope, user, check_location)
1872 return False
1872 return False
1873
1873
1874 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1874 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1875 user_group_name=None):
1875 user_group_name=None):
1876 """
1876 """
1877 Implement in child class; should return True if permissions are ok,
1877 Implement in child class; should return True if permissions are ok,
1878 False otherwise
1878 False otherwise
1879
1879
1880 :param perm_defs: dict with permission definitions
1880 :param perm_defs: dict with permission definitions
1881 :param repo_name: repo name
1881 :param repo_name: repo name
1882 """
1882 """
1883 raise NotImplementedError()
1883 raise NotImplementedError()
1884
1884
1885
1885
1886 class HasPermissionAllApi(_BaseApiPerm):
1886 class HasPermissionAllApi(_BaseApiPerm):
1887 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1887 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1888 user_group_name=None):
1888 user_group_name=None):
1889 if self.required_perms.issubset(perm_defs.get('global')):
1889 if self.required_perms.issubset(perm_defs.get('global')):
1890 return True
1890 return True
1891 return False
1891 return False
1892
1892
1893
1893
1894 class HasPermissionAnyApi(_BaseApiPerm):
1894 class HasPermissionAnyApi(_BaseApiPerm):
1895 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1895 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1896 user_group_name=None):
1896 user_group_name=None):
1897 if self.required_perms.intersection(perm_defs.get('global')):
1897 if self.required_perms.intersection(perm_defs.get('global')):
1898 return True
1898 return True
1899 return False
1899 return False
1900
1900
1901
1901
1902 class HasRepoPermissionAllApi(_BaseApiPerm):
1902 class HasRepoPermissionAllApi(_BaseApiPerm):
1903 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1903 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1904 user_group_name=None):
1904 user_group_name=None):
1905 try:
1905 try:
1906 _user_perms = set([perm_defs['repositories'][repo_name]])
1906 _user_perms = set([perm_defs['repositories'][repo_name]])
1907 except KeyError:
1907 except KeyError:
1908 log.warning(traceback.format_exc())
1908 log.warning(traceback.format_exc())
1909 return False
1909 return False
1910 if self.required_perms.issubset(_user_perms):
1910 if self.required_perms.issubset(_user_perms):
1911 return True
1911 return True
1912 return False
1912 return False
1913
1913
1914
1914
1915 class HasRepoPermissionAnyApi(_BaseApiPerm):
1915 class HasRepoPermissionAnyApi(_BaseApiPerm):
1916 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1916 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1917 user_group_name=None):
1917 user_group_name=None):
1918 try:
1918 try:
1919 _user_perms = set([perm_defs['repositories'][repo_name]])
1919 _user_perms = set([perm_defs['repositories'][repo_name]])
1920 except KeyError:
1920 except KeyError:
1921 log.warning(traceback.format_exc())
1921 log.warning(traceback.format_exc())
1922 return False
1922 return False
1923 if self.required_perms.intersection(_user_perms):
1923 if self.required_perms.intersection(_user_perms):
1924 return True
1924 return True
1925 return False
1925 return False
1926
1926
1927
1927
1928 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1928 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1929 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1929 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1930 user_group_name=None):
1930 user_group_name=None):
1931 try:
1931 try:
1932 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1932 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1933 except KeyError:
1933 except KeyError:
1934 log.warning(traceback.format_exc())
1934 log.warning(traceback.format_exc())
1935 return False
1935 return False
1936 if self.required_perms.intersection(_user_perms):
1936 if self.required_perms.intersection(_user_perms):
1937 return True
1937 return True
1938 return False
1938 return False
1939
1939
1940
1940
1941 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1941 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1942 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1942 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1943 user_group_name=None):
1943 user_group_name=None):
1944 try:
1944 try:
1945 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1945 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1946 except KeyError:
1946 except KeyError:
1947 log.warning(traceback.format_exc())
1947 log.warning(traceback.format_exc())
1948 return False
1948 return False
1949 if self.required_perms.issubset(_user_perms):
1949 if self.required_perms.issubset(_user_perms):
1950 return True
1950 return True
1951 return False
1951 return False
1952
1952
1953
1953
1954 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1954 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1955 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1955 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1956 user_group_name=None):
1956 user_group_name=None):
1957 try:
1957 try:
1958 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1958 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1959 except KeyError:
1959 except KeyError:
1960 log.warning(traceback.format_exc())
1960 log.warning(traceback.format_exc())
1961 return False
1961 return False
1962 if self.required_perms.intersection(_user_perms):
1962 if self.required_perms.intersection(_user_perms):
1963 return True
1963 return True
1964 return False
1964 return False
1965
1965
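The API variants above are likewise called directly with keyword arguments. A sketch, assuming `apiuser` is the AuthUser of the API call and the repo name is an example:

from rhodecode.lib.auth import HasRepoPermissionAnyApi

has_repo_read = HasRepoPermissionAnyApi(
    'repository.read', 'repository.write', 'repository.admin')(
        user=apiuser, repo_name='some-repo')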
1966
1966
1967 def check_ip_access(source_ip, allowed_ips=None):
1967 def check_ip_access(source_ip, allowed_ips=None):
1968 """
1968 """
1969 Checks if source_ip is inside any of the allowed_ips networks.
1969 Checks if source_ip is inside any of the allowed_ips networks.
1970
1970
1971 :param source_ip:
1971 :param source_ip:
1972 :param allowed_ips: list of allowed ips together with mask
1972 :param allowed_ips: list of allowed ips together with mask
1973 """
1973 """
1974 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1974 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1975 source_ip_address = ipaddress.ip_address(source_ip)
1975 source_ip_address = ipaddress.ip_address(source_ip)
1976 if isinstance(allowed_ips, (tuple, list, set)):
1976 if isinstance(allowed_ips, (tuple, list, set)):
1977 for ip in allowed_ips:
1977 for ip in allowed_ips:
1978 try:
1978 try:
1979 network_address = ipaddress.ip_network(ip, strict=False)
1979 network_address = ipaddress.ip_network(ip, strict=False)
1980 if source_ip_address in network_address:
1980 if source_ip_address in network_address:
1981 log.debug('IP %s is in network %s' %
1981 log.debug('IP %s is in network %s' %
1982 (source_ip_address, network_address))
1982 (source_ip_address, network_address))
1983 return True
1983 return True
1984 # if for any reason we cannot determine the IP, don't crash, just
1984 # if for any reason we cannot determine the IP, don't crash, just
1985 # skip it and log it as an error; we still want to answer forbidden
1985 # skip it and log it as an error; we still want to answer forbidden
1986 # when a bad IP is sent
1986 # when a bad IP is sent
1987 except Exception:
1987 except Exception:
1988 log.error(traceback.format_exc())
1988 log.error(traceback.format_exc())
1989 continue
1989 continue
1990 return False
1990 return False
1991
1991
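The containment test inside check_ip_access reduces to the stdlib ipaddress calls shown here; addresses are examples only, and a bare IP in allowed_ips behaves as a single-address network:

import ipaddress

allowed_ips = [u'192.168.1.0/24', u'10.0.0.1']
source = ipaddress.ip_address(u'192.168.1.77')

matches = any(
    source in ipaddress.ip_network(entry, strict=False)
    for entry in allowed_ips)
print(matches)  # True: 192.168.1.77 falls inside 192.168.1.0/24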
1992
1992
1993 def get_cython_compat_decorator(wrapper, func):
1993 def get_cython_compat_decorator(wrapper, func):
1994 """
1994 """
1995 Creates a cython compatible decorator. The previously used
1995 Creates a cython compatible decorator. The previously used
1996 decorator.decorator() function seems to be incompatible with cython.
1996 decorator.decorator() function seems to be incompatible with cython.
1997
1997
1998 :param wrapper: __wrapper method of the decorator class
1998 :param wrapper: __wrapper method of the decorator class
1999 :param func: decorated function
1999 :param func: decorated function
2000 """
2000 """
2001 @wraps(func)
2001 @wraps(func)
2002 def local_wrapper(*args, **kwds):
2002 def local_wrapper(*args, **kwds):
2003 return wrapper(func, *args, **kwds)
2003 return wrapper(func, *args, **kwds)
2004 local_wrapper.__wrapped__ = func
2004 local_wrapper.__wrapped__ = func
2005 return local_wrapper
2005 return local_wrapper
2006
2006
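A self-contained toy showing the pattern every decorator class in this module follows (the helper is inlined verbatim so the snippet runs on its own; LogCalls is an invented example class):

from functools import wraps

def get_cython_compat_decorator(wrapper, func):
    @wraps(func)
    def local_wrapper(*args, **kwds):
        return wrapper(func, *args, **kwds)
    local_wrapper.__wrapped__ = func
    return local_wrapper

class LogCalls(object):
    """Invented example decorator class following the module's pattern."""

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # runs before every decorated call, then delegates to the function
        print('calling %s' % func.__name__)
        return func(*fargs, **fkwargs)

@LogCalls()
def greet(name):
    return 'hello %s' % name

print(greet('world'))  # prints "calling greet", then "hello world"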
2007
2007
@@ -1,592 +1,592 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 The base Controller API
22 The base Controller API
23 Provides the BaseController class for subclassing, and its usage in
23 Provides the BaseController class for subclassing, and its usage in
24 different controllers.
24 different controllers.
25 """
25 """
26
26
27 import logging
27 import logging
28 import socket
28 import socket
29
29
30 import ipaddress
30 import ipaddress
31 import pyramid.threadlocal
31 import pyramid.threadlocal
32
32
33 from paste.auth.basic import AuthBasicAuthenticator
33 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 from pylons import config, tmpl_context as c, request, session, url
36 from pylons import config, tmpl_context as c, request, session, url
37 from pylons.controllers import WSGIController
37 from pylons.controllers import WSGIController
38 from pylons.controllers.util import redirect
38 from pylons.controllers.util import redirect
39 from pylons.i18n import translation
39 from pylons.i18n import translation
40 # marcink: don't remove this import
40 # marcink: don't remove this import
41 from pylons.templating import render_mako as render # noqa
41 from pylons.templating import render_mako as render # noqa
42 from pylons.i18n.translation import _
42 from pylons.i18n.translation import _
43 from webob.exc import HTTPFound
43 from webob.exc import HTTPFound
44
44
45
45
46 import rhodecode
46 import rhodecode
47 from rhodecode.authentication.base import VCS_TYPE
47 from rhodecode.authentication.base import VCS_TYPE
48 from rhodecode.lib import auth, utils2
48 from rhodecode.lib import auth, utils2
49 from rhodecode.lib import helpers as h
49 from rhodecode.lib import helpers as h
50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
51 from rhodecode.lib.exceptions import UserCreationError
51 from rhodecode.lib.exceptions import UserCreationError
52 from rhodecode.lib.utils import (
52 from rhodecode.lib.utils import (
53 get_repo_slug, set_rhodecode_config, password_changed,
53 get_repo_slug, set_rhodecode_config, password_changed,
54 get_enabled_hook_classes)
54 get_enabled_hook_classes)
55 from rhodecode.lib.utils2 import (
55 from rhodecode.lib.utils2 import (
56 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
56 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
58 from rhodecode.model import meta
58 from rhodecode.model import meta
59 from rhodecode.model.db import Repository, User, ChangesetComment
59 from rhodecode.model.db import Repository, User, ChangesetComment
60 from rhodecode.model.notification import NotificationModel
60 from rhodecode.model.notification import NotificationModel
61 from rhodecode.model.scm import ScmModel
61 from rhodecode.model.scm import ScmModel
62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
63
63
64
64
65 log = logging.getLogger(__name__)
65 log = logging.getLogger(__name__)
66
66
67
67
68 def _filter_proxy(ip):
68 def _filter_proxy(ip):
69 """
69 """
70 IP addresses passed in HEADERS can be in a special format of multiple
70 IP addresses passed in HEADERS can be in a special format of multiple
71 IPs. Those comma separated IPs are passed from the various proxies in the
71 IPs. Those comma separated IPs are passed from the various proxies in the
72 chain of request processing, with the left-most being the original client.
72 chain of request processing, with the left-most being the original client.
73 We only care about that first IP, which came from the original client.
73 We only care about that first IP, which came from the original client.
74
74
75 :param ip: ip string from headers
75 :param ip: ip string from headers
76 """
76 """
77 if ',' in ip:
77 if ',' in ip:
78 _ips = ip.split(',')
78 _ips = ip.split(',')
79 _first_ip = _ips[0].strip()
79 _first_ip = _ips[0].strip()
80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81 return _first_ip
81 return _first_ip
82 return ip
82 return ip
83
83
84
84
85 def _filter_port(ip):
85 def _filter_port(ip):
86 """
86 """
87 Removes a port from ip, there are 4 main cases to handle here.
87 Removes a port from ip, there are 4 main cases to handle here.
88 - ipv4 eg. 127.0.0.1
88 - ipv4 eg. 127.0.0.1
89 - ipv6 eg. ::1
89 - ipv6 eg. ::1
90 - ipv4+port eg. 127.0.0.1:8080
90 - ipv4+port eg. 127.0.0.1:8080
91 - ipv6+port eg. [::1]:8080
91 - ipv6+port eg. [::1]:8080
92
92
93 :param ip:
93 :param ip:
94 """
94 """
95 def is_ipv6(ip_addr):
95 def is_ipv6(ip_addr):
96 if hasattr(socket, 'inet_pton'):
96 if hasattr(socket, 'inet_pton'):
97 try:
97 try:
98 socket.inet_pton(socket.AF_INET6, ip_addr)
98 socket.inet_pton(socket.AF_INET6, ip_addr)
99 except socket.error:
99 except socket.error:
100 return False
100 return False
101 else:
101 else:
102 # fallback to ipaddress
102 # fallback to ipaddress
103 try:
103 try:
104 ipaddress.IPv6Address(ip_addr)
104 ipaddress.IPv6Address(ip_addr)
105 except Exception:
105 except Exception:
106 return False
106 return False
107 return True
107 return True
108
108
109 if ':' not in ip: # must be ipv4 pure ip
109 if ':' not in ip: # must be ipv4 pure ip
110 return ip
110 return ip
111
111
112 if '[' in ip and ']' in ip: # ipv6 with port
112 if '[' in ip and ']' in ip: # ipv6 with port
113 return ip.split(']')[0][1:].lower()
113 return ip.split(']')[0][1:].lower()
114
114
115 # must be ipv6 or ipv4 with port
115 # must be ipv6 or ipv4 with port
116 if is_ipv6(ip):
116 if is_ipv6(ip):
117 return ip
117 return ip
118 else:
118 else:
119 ip, _port = ip.split(':')[:2] # means ipv4+port
119 ip, _port = ip.split(':')[:2] # means ipv4+port
120 return ip
120 return ip
121
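A quick check of the four cases listed in the docstring, assuming this module's _filter_port is in scope; the inputs are examples:

for raw in ('127.0.0.1', '::1', '127.0.0.1:8080', '[::1]:8080'):
    print('%s -> %s' % (raw, _filter_port(raw)))
# 127.0.0.1      -> 127.0.0.1
# ::1            -> ::1
# 127.0.0.1:8080 -> 127.0.0.1
# [::1]:8080     -> ::1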
121
122
122
123 def get_ip_addr(environ):
123 def get_ip_addr(environ):
124 proxy_key = 'HTTP_X_REAL_IP'
124 proxy_key = 'HTTP_X_REAL_IP'
125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126 def_key = 'REMOTE_ADDR'
126 def_key = 'REMOTE_ADDR'
127 _filters = lambda x: _filter_port(_filter_proxy(x))
127 _filters = lambda x: _filter_port(_filter_proxy(x))
128
128
129 ip = environ.get(proxy_key)
129 ip = environ.get(proxy_key)
130 if ip:
130 if ip:
131 return _filters(ip)
131 return _filters(ip)
132
132
133 ip = environ.get(proxy_key2)
133 ip = environ.get(proxy_key2)
134 if ip:
134 if ip:
135 return _filters(ip)
135 return _filters(ip)
136
136
137 ip = environ.get(def_key, '0.0.0.0')
137 ip = environ.get(def_key, '0.0.0.0')
138 return _filters(ip)
138 return _filters(ip)
139
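Header precedence sketch for get_ip_addr: X-Real-IP wins over X-Forwarded-For, which wins over REMOTE_ADDR. A minimal WSGI-style environ serves as example input, assuming get_ip_addr from this module is in scope:

environ = {
    'HTTP_X_FORWARDED_FOR': '203.0.113.9, 10.0.0.2',
    'REMOTE_ADDR': '10.0.0.2',
}
# left-most X-Forwarded-For entry, after proxy/port filtering
print(get_ip_addr(environ))  # -> '203.0.113.9'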
139
140
140
141 def get_server_ip_addr(environ, log_errors=True):
141 def get_server_ip_addr(environ, log_errors=True):
142 hostname = environ.get('SERVER_NAME')
142 hostname = environ.get('SERVER_NAME')
143 try:
143 try:
144 return socket.gethostbyname(hostname)
144 return socket.gethostbyname(hostname)
145 except Exception as e:
145 except Exception as e:
146 if log_errors:
146 if log_errors:
147 # in some cases this lookup is not possible, and we don't want to
147 # in some cases this lookup is not possible, and we don't want to
148 # make it an exception in logs
148 # make it an exception in logs
149 log.exception('Could not retrieve server ip address: %s', e)
149 log.exception('Could not retrieve server ip address: %s', e)
150 return hostname
150 return hostname
151
151
152
152
153 def get_server_port(environ):
153 def get_server_port(environ):
154 return environ.get('SERVER_PORT')
154 return environ.get('SERVER_PORT')
155
155
156
156
157 def get_access_path(environ):
157 def get_access_path(environ):
158 path = environ.get('PATH_INFO')
158 path = environ.get('PATH_INFO')
159 org_req = environ.get('pylons.original_request')
159 org_req = environ.get('pylons.original_request')
160 if org_req:
160 if org_req:
161 path = org_req.environ.get('PATH_INFO')
161 path = org_req.environ.get('PATH_INFO')
162 return path
162 return path
163
163
164
164
165 def get_user_agent(environ):
165 def get_user_agent(environ):
166 return environ.get('HTTP_USER_AGENT')
166 return environ.get('HTTP_USER_AGENT')
167
167
168
168
169 def vcs_operation_context(
169 def vcs_operation_context(
170 environ, repo_name, username, action, scm, check_locking=True,
170 environ, repo_name, username, action, scm, check_locking=True,
171 is_shadow_repo=False):
171 is_shadow_repo=False):
172 """
172 """
173 Generate the context for a vcs operation, e.g. push or pull.
173 Generate the context for a vcs operation, e.g. push or pull.
174
174
175 This context is passed over the layers so that hooks triggered by the
175 This context is passed over the layers so that hooks triggered by the
176 vcs operation know details like the user, the user's IP address etc.
176 vcs operation know details like the user, the user's IP address etc.
177
177
178 :param check_locking: Allows switching off the computation of the locking
178 :param check_locking: Allows switching off the computation of the locking
179 data. This mainly serves the simplevcs middleware, which needs to be
179 data. This mainly serves the simplevcs middleware, which needs to be
180 able to disable it for certain operations.
180 able to disable it for certain operations.
181
181
182 """
182 """
183 # Tri-state value: False: unlock, None: nothing, True: lock
183 # Tri-state value: False: unlock, None: nothing, True: lock
184 make_lock = None
184 make_lock = None
185 locked_by = [None, None, None]
185 locked_by = [None, None, None]
186 is_anonymous = username == User.DEFAULT_USER
186 is_anonymous = username == User.DEFAULT_USER
187 if not is_anonymous and check_locking:
187 if not is_anonymous and check_locking:
188 log.debug('Checking locking on repository "%s"', repo_name)
188 log.debug('Checking locking on repository "%s"', repo_name)
189 user = User.get_by_username(username)
189 user = User.get_by_username(username)
190 repo = Repository.get_by_repo_name(repo_name)
190 repo = Repository.get_by_repo_name(repo_name)
191 make_lock, __, locked_by = repo.get_locking_state(
191 make_lock, __, locked_by = repo.get_locking_state(
192 action, user.user_id)
192 action, user.user_id)
193
193
194 settings_model = VcsSettingsModel(repo=repo_name)
194 settings_model = VcsSettingsModel(repo=repo_name)
195 ui_settings = settings_model.get_ui_settings()
195 ui_settings = settings_model.get_ui_settings()
196
196
197 extras = {
197 extras = {
198 'ip': get_ip_addr(environ),
198 'ip': get_ip_addr(environ),
199 'username': username,
199 'username': username,
200 'action': action,
200 'action': action,
201 'repository': repo_name,
201 'repository': repo_name,
202 'scm': scm,
202 'scm': scm,
203 'config': rhodecode.CONFIG['__file__'],
203 'config': rhodecode.CONFIG['__file__'],
204 'make_lock': make_lock,
204 'make_lock': make_lock,
205 'locked_by': locked_by,
205 'locked_by': locked_by,
206 'server_url': utils2.get_server_url(environ),
206 'server_url': utils2.get_server_url(environ),
207 'user_agent': get_user_agent(environ),
207 'user_agent': get_user_agent(environ),
208 'hooks': get_enabled_hook_classes(ui_settings),
208 'hooks': get_enabled_hook_classes(ui_settings),
209 'is_shadow_repo': is_shadow_repo,
209 'is_shadow_repo': is_shadow_repo,
210 }
210 }
211 return extras
211 return extras
212
212
213
213
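# A hedged sketch of calling vcs_operation_context() from middleware code.
# It needs a configured RhodeCode environment (database, settings); the
# repository name and action below are purely illustrative placeholders.
def vcs_operation_context_example(environ):
    # locking checks are skipped, so no lock-state lookup is performed
    extras = vcs_operation_context(
        environ, repo_name='some-repo', username=User.DEFAULT_USER,
        action='pull', scm='git', check_locking=False)
    # the resulting dict carries request metadata for the vcs hooks
    return extras['ip'], extras['user_agent'], extras['hooks']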
214 class BasicAuth(AuthBasicAuthenticator):
214 class BasicAuth(AuthBasicAuthenticator):
215
215
216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
217 initial_call_detection=False, acl_repo_name=None):
217 initial_call_detection=False, acl_repo_name=None):
218 self.realm = realm
218 self.realm = realm
219 self.initial_call = initial_call_detection
219 self.initial_call = initial_call_detection
220 self.authfunc = authfunc
220 self.authfunc = authfunc
221 self.registry = registry
221 self.registry = registry
222 self.acl_repo_name = acl_repo_name
222 self.acl_repo_name = acl_repo_name
223 self._rc_auth_http_code = auth_http_code
223 self._rc_auth_http_code = auth_http_code
224
224
225 def _get_response_from_code(self, http_code):
225 def _get_response_from_code(self, http_code):
226 try:
226 try:
227 return get_exception(safe_int(http_code))
227 return get_exception(safe_int(http_code))
228 except Exception:
228 except Exception:
229 log.exception('Failed to fetch response for code %s', http_code)
229 log.exception('Failed to fetch response for code %s', http_code)
230 return HTTPForbidden
230 return HTTPForbidden
231
231
232 def build_authentication(self):
232 def build_authentication(self):
233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 if self._rc_auth_http_code and not self.initial_call:
234 if self._rc_auth_http_code and not self.initial_call:
235 # return alternative HTTP code if alternative http return code
235 # return alternative HTTP code if alternative http return code
236 # is specified in RhodeCode config, but ONLY if it's not the
236 # is specified in RhodeCode config, but ONLY if it's not the
237 # FIRST call
237 # FIRST call
238 custom_response_klass = self._get_response_from_code(
238 custom_response_klass = self._get_response_from_code(
239 self._rc_auth_http_code)
239 self._rc_auth_http_code)
240 return custom_response_klass(headers=head)
240 return custom_response_klass(headers=head)
241 return HTTPUnauthorized(headers=head)
241 return HTTPUnauthorized(headers=head)
242
242
243 def authenticate(self, environ):
243 def authenticate(self, environ):
244 authorization = AUTHORIZATION(environ)
244 authorization = AUTHORIZATION(environ)
245 if not authorization:
245 if not authorization:
246 return self.build_authentication()
246 return self.build_authentication()
247 (authmeth, auth) = authorization.split(' ', 1)
247 (authmeth, auth) = authorization.split(' ', 1)
248 if 'basic' != authmeth.lower():
248 if 'basic' != authmeth.lower():
249 return self.build_authentication()
249 return self.build_authentication()
250 auth = auth.strip().decode('base64')
250 auth = auth.strip().decode('base64')
251 _parts = auth.split(':', 1)
251 _parts = auth.split(':', 1)
252 if len(_parts) == 2:
252 if len(_parts) == 2:
253 username, password = _parts
253 username, password = _parts
254 if self.authfunc(
254 if self.authfunc(
255 username, password, environ, VCS_TYPE,
255 username, password, environ, VCS_TYPE,
256 registry=self.registry, acl_repo_name=self.acl_repo_name):
256 registry=self.registry, acl_repo_name=self.acl_repo_name):
257 return username
257 return username
258 if username and password:
258 if username and password:
259 # we mark that we actually executed authentication once, at
259 # we mark that we actually executed authentication once, at
260 # that point we can use the alternative auth code
260 # that point we can use the alternative auth code
261 self.initial_call = False
261 self.initial_call = False
262
262
263 return self.build_authentication()
263 return self.build_authentication()
264
264
265 __call__ = authenticate
265 __call__ = authenticate
266
266
267
267
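# A sketch of feeding HTTP Basic credentials from a WSGI environ into the
# BasicAuth authenticator above. The realm, callback and credentials are
# made-up placeholders for illustration only.
def basic_auth_example(registry):
    import base64

    def dummy_authfunc(username, password, environ, vcs_type,
                       registry=None, acl_repo_name=None):
        # placeholder check; real deployments plug in RhodeCode's auth stack
        return username == 'user' and password == 'secret'

    authenticator = BasicAuth(
        realm='RhodeCode', authfunc=dummy_authfunc, registry=registry)
    environ = {
        'HTTP_AUTHORIZATION': 'Basic ' + base64.b64encode('user:secret'),
    }
    # returns the username on success, otherwise an HTTP error response
    return authenticator(environ)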
268 def attach_context_attributes(context, request, user_id):
268 def attach_context_attributes(context, request, user_id):
269 """
269 """
270 Attach variables to the template context `c`. Note that the request
270 Attach variables to the template context `c`. Note that the request
271 here can be either a pylons or a pyramid request.
271 here can be either a pylons or a pyramid request.
272 """
272 """
273 rc_config = SettingsModel().get_all_settings(cache=True)
273 rc_config = SettingsModel().get_all_settings(cache=True)
274
274
275 context.rhodecode_version = rhodecode.__version__
275 context.rhodecode_version = rhodecode.__version__
276 context.rhodecode_edition = config.get('rhodecode.edition')
276 context.rhodecode_edition = config.get('rhodecode.edition')
277 # unique secret + version does not leak the version but keeps consistency
277 # unique secret + version does not leak the version but keeps consistency
278 context.rhodecode_version_hash = md5(
278 context.rhodecode_version_hash = md5(
279 config.get('beaker.session.secret', '') +
279 config.get('beaker.session.secret', '') +
280 rhodecode.__version__)[:8]
280 rhodecode.__version__)[:8]
281
281
282 # Default language set for the incoming request
282 # Default language set for the incoming request
283 context.language = translation.get_lang()[0]
283 context.language = translation.get_lang()[0]
284
284
285 # Visual options
285 # Visual options
286 context.visual = AttributeDict({})
286 context.visual = AttributeDict({})
287
287
288 # DB stored Visual Items
288 # DB stored Visual Items
289 context.visual.show_public_icon = str2bool(
289 context.visual.show_public_icon = str2bool(
290 rc_config.get('rhodecode_show_public_icon'))
290 rc_config.get('rhodecode_show_public_icon'))
291 context.visual.show_private_icon = str2bool(
291 context.visual.show_private_icon = str2bool(
292 rc_config.get('rhodecode_show_private_icon'))
292 rc_config.get('rhodecode_show_private_icon'))
293 context.visual.stylify_metatags = str2bool(
293 context.visual.stylify_metatags = str2bool(
294 rc_config.get('rhodecode_stylify_metatags'))
294 rc_config.get('rhodecode_stylify_metatags'))
295 context.visual.dashboard_items = safe_int(
295 context.visual.dashboard_items = safe_int(
296 rc_config.get('rhodecode_dashboard_items', 100))
296 rc_config.get('rhodecode_dashboard_items', 100))
297 context.visual.admin_grid_items = safe_int(
297 context.visual.admin_grid_items = safe_int(
298 rc_config.get('rhodecode_admin_grid_items', 100))
298 rc_config.get('rhodecode_admin_grid_items', 100))
299 context.visual.repository_fields = str2bool(
299 context.visual.repository_fields = str2bool(
300 rc_config.get('rhodecode_repository_fields'))
300 rc_config.get('rhodecode_repository_fields'))
301 context.visual.show_version = str2bool(
301 context.visual.show_version = str2bool(
302 rc_config.get('rhodecode_show_version'))
302 rc_config.get('rhodecode_show_version'))
303 context.visual.use_gravatar = str2bool(
303 context.visual.use_gravatar = str2bool(
304 rc_config.get('rhodecode_use_gravatar'))
304 rc_config.get('rhodecode_use_gravatar'))
305 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
305 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
306 context.visual.default_renderer = rc_config.get(
306 context.visual.default_renderer = rc_config.get(
307 'rhodecode_markup_renderer', 'rst')
307 'rhodecode_markup_renderer', 'rst')
308 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
308 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
309 context.visual.rhodecode_support_url = \
309 context.visual.rhodecode_support_url = \
310 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
310 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
311
311
312 context.pre_code = rc_config.get('rhodecode_pre_code')
312 context.pre_code = rc_config.get('rhodecode_pre_code')
313 context.post_code = rc_config.get('rhodecode_post_code')
313 context.post_code = rc_config.get('rhodecode_post_code')
314 context.rhodecode_name = rc_config.get('rhodecode_title')
314 context.rhodecode_name = rc_config.get('rhodecode_title')
315 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
315 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
316 # a default_encoding specified in the request takes priority over the
316 # a default_encoding specified in the request takes priority over the
317 # configured one
317 # configured one
318 if request.GET.get('default_encoding'):
318 if request.GET.get('default_encoding'):
319 context.default_encodings.insert(0, request.GET.get('default_encoding'))
319 context.default_encodings.insert(0, request.GET.get('default_encoding'))
320 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
320 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
321
321
322 # INI stored
322 # INI stored
323 context.labs_active = str2bool(
323 context.labs_active = str2bool(
324 config.get('labs_settings_active', 'false'))
324 config.get('labs_settings_active', 'false'))
325 context.visual.allow_repo_location_change = str2bool(
325 context.visual.allow_repo_location_change = str2bool(
326 config.get('allow_repo_location_change', True))
326 config.get('allow_repo_location_change', True))
327 context.visual.allow_custom_hooks_settings = str2bool(
327 context.visual.allow_custom_hooks_settings = str2bool(
328 config.get('allow_custom_hooks_settings', True))
328 config.get('allow_custom_hooks_settings', True))
329 context.debug_style = str2bool(config.get('debug_style', False))
329 context.debug_style = str2bool(config.get('debug_style', False))
330
330
331 context.rhodecode_instanceid = config.get('instance_id')
331 context.rhodecode_instanceid = config.get('instance_id')
332
332
333 # AppEnlight
333 # AppEnlight
334 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
334 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
335 context.appenlight_api_public_key = config.get(
335 context.appenlight_api_public_key = config.get(
336 'appenlight.api_public_key', '')
336 'appenlight.api_public_key', '')
337 context.appenlight_server_url = config.get('appenlight.server_url', '')
337 context.appenlight_server_url = config.get('appenlight.server_url', '')
338
338
339 # JS template context
339 # JS template context
340 context.template_context = {
340 context.template_context = {
341 'repo_name': None,
341 'repo_name': None,
342 'repo_type': None,
342 'repo_type': None,
343 'repo_landing_commit': None,
343 'repo_landing_commit': None,
344 'rhodecode_user': {
344 'rhodecode_user': {
345 'username': None,
345 'username': None,
346 'email': None,
346 'email': None,
347 'notification_status': False
347 'notification_status': False
348 },
348 },
349 'visual': {
349 'visual': {
350 'default_renderer': None
350 'default_renderer': None
351 },
351 },
352 'commit_data': {
352 'commit_data': {
353 'commit_id': None
353 'commit_id': None
354 },
354 },
355 'pull_request_data': {'pull_request_id': None},
355 'pull_request_data': {'pull_request_id': None},
356 'timeago': {
356 'timeago': {
357 'refresh_time': 120 * 1000,
357 'refresh_time': 120 * 1000,
358 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
358 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
359 },
359 },
360 'pylons_dispatch': {
360 'pylons_dispatch': {
361 # 'controller': request.environ['pylons.routes_dict']['controller'],
361 # 'controller': request.environ['pylons.routes_dict']['controller'],
362 # 'action': request.environ['pylons.routes_dict']['action'],
362 # 'action': request.environ['pylons.routes_dict']['action'],
363 },
363 },
364 'pyramid_dispatch': {
364 'pyramid_dispatch': {
365
365
366 },
366 },
367 'extra': {'plugins': {}}
367 'extra': {'plugins': {}}
368 }
368 }
369 # END CONFIG VARS
369 # END CONFIG VARS
370
370
371 # TODO: This doesn't work when called from the pylons compatibility tween.
371 # TODO: This doesn't work when called from the pylons compatibility tween.
372 # Fix this and remove it from base controller.
372 # Fix this and remove it from base controller.
373 # context.repo_name = get_repo_slug(request) # can be empty
373 # context.repo_name = get_repo_slug(request) # can be empty
374
374
375 diffmode = 'sideside'
375 diffmode = 'sideside'
376 if request.GET.get('diffmode'):
376 if request.GET.get('diffmode'):
377 if request.GET['diffmode'] == 'unified':
377 if request.GET['diffmode'] == 'unified':
378 diffmode = 'unified'
378 diffmode = 'unified'
379 elif request.session.get('diffmode'):
379 elif request.session.get('diffmode'):
380 diffmode = request.session['diffmode']
380 diffmode = request.session['diffmode']
381
381
382 context.diffmode = diffmode
382 context.diffmode = diffmode
383
383
384 if request.session.get('diffmode') != diffmode:
384 if request.session.get('diffmode') != diffmode:
385 request.session['diffmode'] = diffmode
385 request.session['diffmode'] = diffmode
386
386
387 context.csrf_token = auth.get_csrf_token()
387 context.csrf_token = auth.get_csrf_token()
388 context.backends = rhodecode.BACKENDS.keys()
388 context.backends = rhodecode.BACKENDS.keys()
389 context.backends.sort()
389 context.backends.sort()
390 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
390 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
391 context.pyramid_request = pyramid.threadlocal.get_current_request()
391 context.pyramid_request = pyramid.threadlocal.get_current_request()
392
392
393
393
394 def get_auth_user(environ):
394 def get_auth_user(environ):
395 ip_addr = get_ip_addr(environ)
395 ip_addr = get_ip_addr(environ)
396 # make sure that we update permissions each time we call the controller
396 # make sure that we update permissions each time we call the controller
397 _auth_token = (request.GET.get('auth_token', '') or
397 _auth_token = (request.GET.get('auth_token', '') or
398 request.GET.get('api_key', ''))
398 request.GET.get('api_key', ''))
399
399
400 if _auth_token:
400 if _auth_token:
401 # when using API_KEY we assume user exists, and
401 # when using API_KEY we assume user exists, and
402 # doesn't need auth based on cookies.
402 # doesn't need auth based on cookies.
403 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
403 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
404 authenticated = False
404 authenticated = False
405 else:
405 else:
406 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
406 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
407 try:
407 try:
408 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
408 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
409 ip_addr=ip_addr)
409 ip_addr=ip_addr)
410 except UserCreationError as e:
410 except UserCreationError as e:
411 h.flash(e, 'error')
411 h.flash(e, 'error')
412 # container auth or other auth functions that create users
412 # container auth or other auth functions that create users
413 # on the fly can throw this exception signaling that there's
413 # on the fly can throw this exception signaling that there's
414 # an issue with user creation; the explanation should be provided
414 # an issue with user creation; the explanation should be provided
415 # in the exception itself. We then create a simple blank
415 # in the exception itself. We then create a simple blank
416 # AuthUser
416 # AuthUser
417 auth_user = AuthUser(ip_addr=ip_addr)
417 auth_user = AuthUser(ip_addr=ip_addr)
418
418
419 if password_changed(auth_user, session):
419 if password_changed(auth_user, session):
420 session.invalidate()
420 session.invalidate()
421 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
421 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
422 auth_user = AuthUser(ip_addr=ip_addr)
422 auth_user = AuthUser(ip_addr=ip_addr)
423
423
424 authenticated = cookie_store.get('is_authenticated')
424 authenticated = cookie_store.get('is_authenticated')
425
425
426 if not auth_user.is_authenticated and auth_user.is_user_object:
426 if not auth_user.is_authenticated and auth_user.is_user_object:
427 # user is not authenticated and not empty
427 # user is not authenticated and not empty
428 auth_user.set_authenticated(authenticated)
428 auth_user.set_authenticated(authenticated)
429
429
430 return auth_user
430 return auth_user
431
431
432
432
433 class BaseController(WSGIController):
433 class BaseController(WSGIController):
434
434
435 def __before__(self):
435 def __before__(self):
436 """
436 """
437 __before__ is called before controller methods and after __call__
437 __before__ is called before controller methods and after __call__
438 """
438 """
439 # on each call, propagate the stored settings into the global config.
439 # on each call, propagate the stored settings into the global config.
440 set_rhodecode_config(config)
440 set_rhodecode_config(config)
441 attach_context_attributes(c, request, c.rhodecode_user.user_id)
441 attach_context_attributes(c, request, c.rhodecode_user.user_id)
442
442
443 # TODO: Remove this when fixed in attach_context_attributes()
443 # TODO: Remove this when fixed in attach_context_attributes()
444 c.repo_name = get_repo_slug(request) # can be empty
444 c.repo_name = get_repo_slug(request) # can be empty
445
445
446 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
446 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
447 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
447 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
448 self.sa = meta.Session
448 self.sa = meta.Session
449 self.scm_model = ScmModel(self.sa)
449 self.scm_model = ScmModel(self.sa)
450
450
451 # set user language
451 # set user language
452 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
452 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
453 if user_lang:
453 if user_lang:
454 translation.set_lang(user_lang)
454 translation.set_lang(user_lang)
455 log.debug('set language to %s for user %s',
455 log.debug('set language to %s for user %s',
456 user_lang, self._rhodecode_user)
456 user_lang, self._rhodecode_user)
457
457
458 def _dispatch_redirect(self, with_url, environ, start_response):
458 def _dispatch_redirect(self, with_url, environ, start_response):
459 resp = HTTPFound(with_url)
459 resp = HTTPFound(with_url)
460 environ['SCRIPT_NAME'] = '' # handle prefix middleware
460 environ['SCRIPT_NAME'] = '' # handle prefix middleware
461 environ['PATH_INFO'] = with_url
461 environ['PATH_INFO'] = with_url
462 return resp(environ, start_response)
462 return resp(environ, start_response)
463
463
464 def __call__(self, environ, start_response):
464 def __call__(self, environ, start_response):
465 """Invoke the Controller"""
465 """Invoke the Controller"""
466 # WSGIController.__call__ dispatches to the Controller method
466 # WSGIController.__call__ dispatches to the Controller method
467 # the request is routed to. This routing information is
467 # the request is routed to. This routing information is
468 # available in environ['pylons.routes_dict']
468 # available in environ['pylons.routes_dict']
469 from rhodecode.lib import helpers as h
469 from rhodecode.lib import helpers as h
470
470
471 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
471 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
472 if environ.get('debugtoolbar.wants_pylons_context', False):
472 if environ.get('debugtoolbar.wants_pylons_context', False):
473 environ['debugtoolbar.pylons_context'] = c._current_obj()
473 environ['debugtoolbar.pylons_context'] = c._current_obj()
474
474
475 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
475 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
476 environ['pylons.routes_dict']['action']])
476 environ['pylons.routes_dict']['action']])
477
477
478 self.rc_config = SettingsModel().get_all_settings(cache=True)
478 self.rc_config = SettingsModel().get_all_settings(cache=True)
479 self.ip_addr = get_ip_addr(environ)
479 self.ip_addr = get_ip_addr(environ)
480
480
481 # The rhodecode auth user is looked up and passed through the
481 # The rhodecode auth user is looked up and passed through the
482 # environ by the pylons compatibility tween in pyramid.
482 # environ by the pylons compatibility tween in pyramid.
483 # So we can just grab it from there.
483 # So we can just grab it from there.
484 auth_user = environ['rc_auth_user']
484 auth_user = environ['rc_auth_user']
485
485
486 # set globals for auth user
486 # set globals for auth user
487 request.user = auth_user
487 request.user = auth_user
488 c.rhodecode_user = self._rhodecode_user = auth_user
488 c.rhodecode_user = self._rhodecode_user = auth_user
489
489
490 log.info('IP: %s User: %s accessed %s [%s]' % (
490 log.info('IP: %s User: %s accessed %s [%s]' % (
491 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
491 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
492 _route_name)
492 _route_name)
493 )
493 )
494
494
495 user_obj = auth_user.get_instance()
495 user_obj = auth_user.get_instance()
496 if user_obj and user_obj.user_data.get('force_password_change'):
496 if user_obj and user_obj.user_data.get('force_password_change'):
497 h.flash('You are required to change your password', 'warning',
497 h.flash('You are required to change your password', 'warning',
498 ignore_duplicate=True)
498 ignore_duplicate=True)
499 return self._dispatch_redirect(
499 return self._dispatch_redirect(
500 url('my_account_password'), environ, start_response)
500 url('my_account_password'), environ, start_response)
501
501
502 return WSGIController.__call__(self, environ, start_response)
502 return WSGIController.__call__(self, environ, start_response)
503
503
504
504
505 class BaseRepoController(BaseController):
505 class BaseRepoController(BaseController):
506 """
506 """
507 Base class for controllers responsible for loading all needed data for
507 Base class for controllers responsible for loading all needed data for
508 a repository. The loaded items are:
508 a repository. The loaded items are:
509
509
510 c.rhodecode_repo: instance of the scm repository
510 c.rhodecode_repo: instance of the scm repository
511 c.rhodecode_db_repo: instance of the db repository
511 c.rhodecode_db_repo: instance of the db repository
512 c.repository_requirements_missing: indicates that repository-specific data
512 c.repository_requirements_missing: indicates that repository-specific data
513 could not be displayed due to missing requirements
513 could not be displayed due to missing requirements
514 c.repository_pull_requests: number of open pull requests
514 c.repository_pull_requests: number of open pull requests
515 """
515 """
516
516
517 def __before__(self):
517 def __before__(self):
518 super(BaseRepoController, self).__before__()
518 super(BaseRepoController, self).__before__()
519 if c.repo_name: # extracted from routes
519 if c.repo_name: # extracted from routes
520 db_repo = Repository.get_by_repo_name(c.repo_name)
520 db_repo = Repository.get_by_repo_name(c.repo_name)
521 if not db_repo:
521 if not db_repo:
522 return
522 return
523
523
524 log.debug(
524 log.debug(
525 'Found repository in database %s with state `%s`',
525 'Found repository in database %s with state `%s`',
526 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
526 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
527 route = getattr(request.environ.get('routes.route'), 'name', '')
527 route = getattr(request.environ.get('routes.route'), 'name', '')
528
528
529 # allow deleting repos that are somehow damaged in the filesystem
529 # allow deleting repos that are somehow damaged in the filesystem
530 if route in ['delete_repo']:
530 if route in ['delete_repo']:
531 return
531 return
532
532
533 if db_repo.repo_state in [Repository.STATE_PENDING]:
533 if db_repo.repo_state in [Repository.STATE_PENDING]:
534 if route in ['repo_creating_home']:
534 if route in ['repo_creating_home']:
535 return
535 return
536 check_url = url('repo_creating_home', repo_name=c.repo_name)
536 check_url = url('repo_creating_home', repo_name=c.repo_name)
537 return redirect(check_url)
537 return redirect(check_url)
538
538
539 self.rhodecode_db_repo = db_repo
539 self.rhodecode_db_repo = db_repo
540
540
541 missing_requirements = False
541 missing_requirements = False
542 try:
542 try:
543 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
543 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
544 except RepositoryRequirementError as e:
544 except RepositoryRequirementError as e:
545 missing_requirements = True
545 missing_requirements = True
546 self._handle_missing_requirements(e)
546 self._handle_missing_requirements(e)
547
547
548 if self.rhodecode_repo is None and not missing_requirements:
548 if self.rhodecode_repo is None and not missing_requirements:
549 log.error('%s this repository is present in database but it '
549 log.error('%s this repository is present in database but it '
550 'cannot be created as an scm instance', c.repo_name)
550 'cannot be created as an scm instance', c.repo_name)
551
551
552 h.flash(_(
552 h.flash(_(
553 "The repository at %(repo_name)s cannot be located.") %
553 "The repository at %(repo_name)s cannot be located.") %
554 {'repo_name': c.repo_name},
554 {'repo_name': c.repo_name},
555 category='error', ignore_duplicate=True)
555 category='error', ignore_duplicate=True)
556 redirect(h.route_path('home'))
556 redirect(h.route_path('home'))
557
557
558 # update last change according to VCS data
558 # update last change according to VCS data
559 if not missing_requirements:
559 if not missing_requirements:
560 commit = db_repo.get_commit(
560 commit = db_repo.get_commit(
561 pre_load=["author", "date", "message", "parents"])
561 pre_load=["author", "date", "message", "parents"])
562 db_repo.update_commit_cache(commit)
562 db_repo.update_commit_cache(commit)
563
563
564 # Prepare context
564 # Prepare context
565 c.rhodecode_db_repo = db_repo
565 c.rhodecode_db_repo = db_repo
566 c.rhodecode_repo = self.rhodecode_repo
566 c.rhodecode_repo = self.rhodecode_repo
567 c.repository_requirements_missing = missing_requirements
567 c.repository_requirements_missing = missing_requirements
568
568
569 self._update_global_counters(self.scm_model, db_repo)
569 self._update_global_counters(self.scm_model, db_repo)
570
570
571 def _update_global_counters(self, scm_model, db_repo):
571 def _update_global_counters(self, scm_model, db_repo):
572 """
572 """
573 Base variables that are exposed to every page of the repository
573 Base variables that are exposed to every page of the repository
574 """
574 """
575 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
575 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
576
576
577 def _handle_missing_requirements(self, error):
577 def _handle_missing_requirements(self, error):
578 self.rhodecode_repo = None
578 self.rhodecode_repo = None
579 log.error(
579 log.error(
580 'Requirements are missing for repository %s: %s',
580 'Requirements are missing for repository %s: %s',
581 c.repo_name, error.message)
581 c.repo_name, error.message)
582
582
583 summary_url = url('summary_home', repo_name=c.repo_name)
583 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
584 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
584 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
585 settings_update_url = url('repo', repo_name=c.repo_name)
585 settings_update_url = url('repo', repo_name=c.repo_name)
586 path = request.path
586 path = request.path
587 should_redirect = (
587 should_redirect = (
588 path not in (summary_url, settings_update_url)
588 path not in (summary_url, settings_update_url)
589 and '/settings' not in path or path == statistics_url
589 and '/settings' not in path or path == statistics_url
590 )
590 )
591 if should_redirect:
591 if should_redirect:
592 redirect(summary_url)
592 redirect(summary_url)
@@ -1,2028 +1,2028 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions typically used within templates, but also
24 Consists of functions typically used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 from collections import OrderedDict
39 from collections import OrderedDict
40
40
41 import pygments
41 import pygments
42 import itertools
42 import itertools
43 import fnmatch
43 import fnmatch
44
44
45 from datetime import datetime
45 from datetime import datetime
46 from functools import partial
46 from functools import partial
47 from pygments.formatters.html import HtmlFormatter
47 from pygments.formatters.html import HtmlFormatter
48 from pygments import highlight as code_highlight
48 from pygments import highlight as code_highlight
49 from pygments.lexers import (
49 from pygments.lexers import (
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51 from pylons import url as pylons_url
51 from pylons import url as pylons_url
52 from pylons.i18n.translation import _, ungettext
52 from pylons.i18n.translation import _, ungettext
53 from pyramid.threadlocal import get_current_request
53 from pyramid.threadlocal import get_current_request
54
54
55 from webhelpers.html import literal, HTML, escape
55 from webhelpers.html import literal, HTML, escape
56 from webhelpers.html.tools import *
56 from webhelpers.html.tools import *
57 from webhelpers.html.builder import make_tag
57 from webhelpers.html.builder import make_tag
58 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
59 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
60 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
61 submit, text, password, textarea, title, ul, xml_declaration, radio
61 submit, text, password, textarea, title, ul, xml_declaration, radio
62 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 from webhelpers.html.tools import auto_link, button_to, highlight, \
63 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
64 from webhelpers.pylonslib import Flash as _Flash
64 from webhelpers.pylonslib import Flash as _Flash
65 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
65 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
66 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
66 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
67 replace_whitespace, urlify, truncate, wrap_paragraphs
67 replace_whitespace, urlify, truncate, wrap_paragraphs
68 from webhelpers.date import time_ago_in_words
68 from webhelpers.date import time_ago_in_words
69 from webhelpers.paginate import Page as _Page
69 from webhelpers.paginate import Page as _Page
70 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
70 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
71 convert_boolean_attrs, NotGiven, _make_safe_id_component
71 convert_boolean_attrs, NotGiven, _make_safe_id_component
72 from webhelpers2.number import format_byte_size
72 from webhelpers2.number import format_byte_size
73
73
74 from rhodecode.lib.action_parser import action_parser
74 from rhodecode.lib.action_parser import action_parser
75 from rhodecode.lib.ext_json import json
75 from rhodecode.lib.ext_json import json
76 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
76 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
77 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
77 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
78 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
78 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
79 AttributeDict, safe_int, md5, md5_safe
79 AttributeDict, safe_int, md5, md5_safe
80 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
80 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
81 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
81 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
82 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
82 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
83 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
83 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
84 from rhodecode.model.changeset_status import ChangesetStatusModel
84 from rhodecode.model.changeset_status import ChangesetStatusModel
85 from rhodecode.model.db import Permission, User, Repository
85 from rhodecode.model.db import Permission, User, Repository
86 from rhodecode.model.repo_group import RepoGroupModel
86 from rhodecode.model.repo_group import RepoGroupModel
87 from rhodecode.model.settings import IssueTrackerSettingsModel
87 from rhodecode.model.settings import IssueTrackerSettingsModel
88
88
89 log = logging.getLogger(__name__)
89 log = logging.getLogger(__name__)
90
90
91
91
92 DEFAULT_USER = User.DEFAULT_USER
92 DEFAULT_USER = User.DEFAULT_USER
93 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
93 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
94
94
95
95
96 def url(*args, **kw):
96 def url(*args, **kw):
97 return pylons_url(*args, **kw)
97 return pylons_url(*args, **kw)
98
98
99
99
100 def pylons_url_current(*args, **kw):
100 def pylons_url_current(*args, **kw):
101 """
101 """
102 This function overrides pylons.url.current() which returns the current
102 This function overrides pylons.url.current() which returns the current
103 path so that it will also work from a pyramid-only context. This
103 path so that it will also work from a pyramid-only context. This
104 should be removed once the port to pyramid is complete.
104 should be removed once the port to pyramid is complete.
105 """
105 """
106 if not args and not kw:
106 if not args and not kw:
107 request = get_current_request()
107 request = get_current_request()
108 return request.path
108 return request.path
109 return pylons_url.current(*args, **kw)
109 return pylons_url.current(*args, **kw)
110
110
111 url.current = pylons_url_current
111 url.current = pylons_url_current
112
112
113
113
114 def url_replace(**qargs):
114 def url_replace(**qargs):
115 """ Returns the current request url while replacing query string args """
115 """ Returns the current request url while replacing query string args """
116
116
117 request = get_current_request()
117 request = get_current_request()
118 new_args = request.GET.mixed()
118 new_args = request.GET.mixed()
119 new_args.update(qargs)
119 new_args.update(qargs)
120 return url('', **new_args)
120 return url('', **new_args)
121
121
122
122
123 def asset(path, ver=None, **kwargs):
123 def asset(path, ver=None, **kwargs):
124 """
124 """
125 Helper to generate a static asset file path for rhodecode assets
125 Helper to generate a static asset file path for rhodecode assets
126
126
127 eg. h.asset('images/image.png', ver='3923')
127 eg. h.asset('images/image.png', ver='3923')
128
128
129 :param path: path of asset
129 :param path: path of asset
130 :param ver: optional version query param to append as ?ver=
130 :param ver: optional version query param to append as ?ver=
131 """
131 """
132 request = get_current_request()
132 request = get_current_request()
133 query = {}
133 query = {}
134 query.update(kwargs)
134 query.update(kwargs)
135 if ver:
135 if ver:
136 query = {'ver': ver}
136 query = {'ver': ver}
137 return request.static_path(
137 return request.static_path(
138 'rhodecode:public/{}'.format(path), _query=query)
138 'rhodecode:public/{}'.format(path), _query=query)
139
139
140
140
141 default_html_escape_table = {
141 default_html_escape_table = {
142 ord('&'): u'&amp;',
142 ord('&'): u'&amp;',
143 ord('<'): u'&lt;',
143 ord('<'): u'&lt;',
144 ord('>'): u'&gt;',
144 ord('>'): u'&gt;',
145 ord('"'): u'&quot;',
145 ord('"'): u'&quot;',
146 ord("'"): u'&#39;',
146 ord("'"): u'&#39;',
147 }
147 }
148
148
149
149
150 def html_escape(text, html_escape_table=default_html_escape_table):
150 def html_escape(text, html_escape_table=default_html_escape_table):
151 """Produce entities within text."""
151 """Produce entities within text."""
152 return text.translate(html_escape_table)
152 return text.translate(html_escape_table)
153
153
154
154
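# A small illustration of the translate-based escaping above. Note that it
# expects a unicode string, since dict-based translate() only works on
# unicode in Python 2.
def html_escape_example():
    escaped = html_escape(u'<b>"Tom" & \'Jerry\'</b>')
    assert escaped == u'&lt;b&gt;&quot;Tom&quot; &amp; &#39;Jerry&#39;&lt;/b&gt;'
    return escaped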
155 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
155 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
156 """
156 """
157 Truncate string ``s`` at the first occurrence of ``sub``.
157 Truncate string ``s`` at the first occurrence of ``sub``.
158
158
159 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
159 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
160 """
160 """
161 suffix_if_chopped = suffix_if_chopped or ''
161 suffix_if_chopped = suffix_if_chopped or ''
162 pos = s.find(sub)
162 pos = s.find(sub)
163 if pos == -1:
163 if pos == -1:
164 return s
164 return s
165
165
166 if inclusive:
166 if inclusive:
167 pos += len(sub)
167 pos += len(sub)
168
168
169 chopped = s[:pos]
169 chopped = s[:pos]
170 left = s[pos:].strip()
170 left = s[pos:].strip()
171
171
172 if left and suffix_if_chopped:
172 if left and suffix_if_chopped:
173 chopped += suffix_if_chopped
173 chopped += suffix_if_chopped
174
174
175 return chopped
175 return chopped
176
176
177
177
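# Quick examples of the truncation helper above, using made-up strings.
def chop_at_smart_examples():
    # chop before the separator, appending a suffix when text was dropped
    assert chop_at_smart('head :: tail', '::', suffix_if_chopped='...') == 'head ...'
    # inclusive chop keeps the separator itself
    assert chop_at_smart('head :: tail', '::', inclusive=True) == 'head ::'
    # no occurrence of ``sub`` leaves the string untouched
    assert chop_at_smart('just text', '::') == 'just text'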
178 def shorter(text, size=20):
178 def shorter(text, size=20):
179 postfix = '...'
179 postfix = '...'
180 if len(text) > size:
180 if len(text) > size:
181 return text[:size - len(postfix)] + postfix
181 return text[:size - len(postfix)] + postfix
182 return text
182 return text
183
183
184
184
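# The ellipsis counts towards the size budget, as these illustrative calls
# show.
def shorter_examples():
    # 17 characters fit the default budget of 20, so nothing is cut
    assert shorter('short description') == 'short description'
    # longer text is cut to size - len('...') and the postfix is appended
    assert shorter('abcdefghij', size=5) == 'ab...'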
185 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
185 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
186 """
186 """
187 Reset button
187 Reset button
188 """
188 """
189 _set_input_attrs(attrs, type, name, value)
189 _set_input_attrs(attrs, type, name, value)
190 _set_id_attr(attrs, id, name)
190 _set_id_attr(attrs, id, name)
191 convert_boolean_attrs(attrs, ["disabled"])
191 convert_boolean_attrs(attrs, ["disabled"])
192 return HTML.input(**attrs)
192 return HTML.input(**attrs)
193
193
194 reset = _reset
194 reset = _reset
195 safeid = _make_safe_id_component
195 safeid = _make_safe_id_component
196
196
197
197
198 def branding(name, length=40):
198 def branding(name, length=40):
199 return truncate(name, length, indicator="")
199 return truncate(name, length, indicator="")
200
200
201
201
202 def FID(raw_id, path):
202 def FID(raw_id, path):
203 """
203 """
204 Creates a unique ID for a filenode based on its commit and a hash of its
204 Creates a unique ID for a filenode based on its commit and a hash of its
205 path; it is safe to use in urls
205 path; it is safe to use in urls
206
206
207 :param raw_id:
207 :param raw_id:
208 :param path:
208 :param path:
209 """
209 """
210
210
211 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
211 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
212
212
213
213
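# A sketch of building a stable per-file anchor with FID(). The commit id
# and path are invented, so the exact digest suffix is not spelled out.
def fid_example(raw_id, path):
    # e.g. raw_id='<40-char commit hash>', path='docs/index.rst'
    anchor = FID(raw_id, path)
    # always of the form 'c-<short commit id>-<12 hex chars of md5(path)>'
    assert anchor.startswith('c-')
    return anchor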
214 class _GetError(object):
214 class _GetError(object):
215 """Get error from form_errors, and represent it as span wrapped error
215 """Get error from form_errors, and represent it as span wrapped error
216 message
216 message
217
217
218 :param field_name: field to fetch errors for
218 :param field_name: field to fetch errors for
219 :param form_errors: form errors dict
219 :param form_errors: form errors dict
220 """
220 """
221
221
222 def __call__(self, field_name, form_errors):
222 def __call__(self, field_name, form_errors):
223 tmpl = """<span class="error_msg">%s</span>"""
223 tmpl = """<span class="error_msg">%s</span>"""
224 if form_errors and field_name in form_errors:
224 if form_errors and field_name in form_errors:
225 return literal(tmpl % form_errors.get(field_name))
225 return literal(tmpl % form_errors.get(field_name))
226
226
227 get_error = _GetError()
227 get_error = _GetError()
228
228
229
229
230 class _ToolTip(object):
230 class _ToolTip(object):
231
231
232 def __call__(self, tooltip_title, trim_at=50):
232 def __call__(self, tooltip_title, trim_at=50):
233 """
233 """
234 Special function to wrap our text into nicely formatted,
234 Special function to wrap our text into nicely formatted,
235 autowrapped text
235 autowrapped text
236
236
237 :param tooltip_title:
237 :param tooltip_title:
238 """
238 """
239 tooltip_title = escape(tooltip_title)
239 tooltip_title = escape(tooltip_title)
240 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
240 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
241 return tooltip_title
241 return tooltip_title
242 tooltip = _ToolTip()
242 tooltip = _ToolTip()
243
243
244
244
245 def files_breadcrumbs(repo_name, commit_id, file_path):
245 def files_breadcrumbs(repo_name, commit_id, file_path):
246 if isinstance(file_path, str):
246 if isinstance(file_path, str):
247 file_path = safe_unicode(file_path)
247 file_path = safe_unicode(file_path)
248
248
249 # TODO: johbo: Is this always a url like path, or is this operating
249 # TODO: johbo: Is this always a url like path, or is this operating
250 # system dependent?
250 # system dependent?
251 path_segments = file_path.split('/')
251 path_segments = file_path.split('/')
252
252
253 repo_name_html = escape(repo_name)
253 repo_name_html = escape(repo_name)
254 if len(path_segments) == 1 and path_segments[0] == '':
254 if len(path_segments) == 1 and path_segments[0] == '':
255 url_segments = [repo_name_html]
255 url_segments = [repo_name_html]
256 else:
256 else:
257 url_segments = [
257 url_segments = [
258 link_to(
258 link_to(
259 repo_name_html,
259 repo_name_html,
260 url('files_home',
260 url('files_home',
261 repo_name=repo_name,
261 repo_name=repo_name,
262 revision=commit_id,
262 revision=commit_id,
263 f_path=''),
263 f_path=''),
264 class_='pjax-link')]
264 class_='pjax-link')]
265
265
266 last_cnt = len(path_segments) - 1
266 last_cnt = len(path_segments) - 1
267 for cnt, segment in enumerate(path_segments):
267 for cnt, segment in enumerate(path_segments):
268 if not segment:
268 if not segment:
269 continue
269 continue
270 segment_html = escape(segment)
270 segment_html = escape(segment)
271
271
272 if cnt != last_cnt:
272 if cnt != last_cnt:
273 url_segments.append(
273 url_segments.append(
274 link_to(
274 link_to(
275 segment_html,
275 segment_html,
276 url('files_home',
276 url('files_home',
277 repo_name=repo_name,
277 repo_name=repo_name,
278 revision=commit_id,
278 revision=commit_id,
279 f_path='/'.join(path_segments[:cnt + 1])),
279 f_path='/'.join(path_segments[:cnt + 1])),
280 class_='pjax-link'))
280 class_='pjax-link'))
281 else:
281 else:
282 url_segments.append(segment_html)
282 url_segments.append(segment_html)
283
283
284 return literal('/'.join(url_segments))
284 return literal('/'.join(url_segments))
285
285
286
286
287 class CodeHtmlFormatter(HtmlFormatter):
287 class CodeHtmlFormatter(HtmlFormatter):
288 """
288 """
289 Custom HTML formatter for source code
289 Custom HTML formatter for source code
290 """
290 """
291
291
292 def wrap(self, source, outfile):
292 def wrap(self, source, outfile):
293 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
293 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
294
294
295 def _wrap_code(self, source):
295 def _wrap_code(self, source):
296 for cnt, it in enumerate(source):
296 for cnt, it in enumerate(source):
297 i, t = it
297 i, t = it
298 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
298 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
299 yield i, t
299 yield i, t
300
300
301 def _wrap_tablelinenos(self, inner):
301 def _wrap_tablelinenos(self, inner):
302 dummyoutfile = StringIO.StringIO()
302 dummyoutfile = StringIO.StringIO()
303 lncount = 0
303 lncount = 0
304 for t, line in inner:
304 for t, line in inner:
305 if t:
305 if t:
306 lncount += 1
306 lncount += 1
307 dummyoutfile.write(line)
307 dummyoutfile.write(line)
308
308
309 fl = self.linenostart
309 fl = self.linenostart
310 mw = len(str(lncount + fl - 1))
310 mw = len(str(lncount + fl - 1))
311 sp = self.linenospecial
311 sp = self.linenospecial
312 st = self.linenostep
312 st = self.linenostep
313 la = self.lineanchors
313 la = self.lineanchors
314 aln = self.anchorlinenos
314 aln = self.anchorlinenos
315 nocls = self.noclasses
315 nocls = self.noclasses
316 if sp:
316 if sp:
317 lines = []
317 lines = []
318
318
319 for i in range(fl, fl + lncount):
319 for i in range(fl, fl + lncount):
320 if i % st == 0:
320 if i % st == 0:
321 if i % sp == 0:
321 if i % sp == 0:
322 if aln:
322 if aln:
323 lines.append('<a href="#%s%d" class="special">%*d</a>' %
323 lines.append('<a href="#%s%d" class="special">%*d</a>' %
324 (la, i, mw, i))
324 (la, i, mw, i))
325 else:
325 else:
326 lines.append('<span class="special">%*d</span>' % (mw, i))
326 lines.append('<span class="special">%*d</span>' % (mw, i))
327 else:
327 else:
328 if aln:
328 if aln:
329 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
329 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
330 else:
330 else:
331 lines.append('%*d' % (mw, i))
331 lines.append('%*d' % (mw, i))
332 else:
332 else:
333 lines.append('')
333 lines.append('')
334 ls = '\n'.join(lines)
334 ls = '\n'.join(lines)
335 else:
335 else:
336 lines = []
336 lines = []
337 for i in range(fl, fl + lncount):
337 for i in range(fl, fl + lncount):
338 if i % st == 0:
338 if i % st == 0:
339 if aln:
339 if aln:
340 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
340 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
341 else:
341 else:
342 lines.append('%*d' % (mw, i))
342 lines.append('%*d' % (mw, i))
343 else:
343 else:
344 lines.append('')
344 lines.append('')
345 ls = '\n'.join(lines)
345 ls = '\n'.join(lines)
346
346
347 # in case you wonder about the seemingly redundant <div> here: since the
347 # in case you wonder about the seemingly redundant <div> here: since the
348 # content in the other cell also is wrapped in a div, some browsers in
348 # content in the other cell also is wrapped in a div, some browsers in
349 # some configurations seem to mess up the formatting...
349 # some configurations seem to mess up the formatting...
350 if nocls:
350 if nocls:
351 yield 0, ('<table class="%stable">' % self.cssclass +
351 yield 0, ('<table class="%stable">' % self.cssclass +
352 '<tr><td><div class="linenodiv" '
352 '<tr><td><div class="linenodiv" '
353 'style="background-color: #f0f0f0; padding-right: 10px">'
353 'style="background-color: #f0f0f0; padding-right: 10px">'
354 '<pre style="line-height: 125%">' +
354 '<pre style="line-height: 125%">' +
355 ls + '</pre></div></td><td id="hlcode" class="code">')
355 ls + '</pre></div></td><td id="hlcode" class="code">')
356 else:
356 else:
357 yield 0, ('<table class="%stable">' % self.cssclass +
357 yield 0, ('<table class="%stable">' % self.cssclass +
358 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
358 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
359 ls + '</pre></div></td><td id="hlcode" class="code">')
359 ls + '</pre></div></td><td id="hlcode" class="code">')
360 yield 0, dummyoutfile.getvalue()
360 yield 0, dummyoutfile.getvalue()
361 yield 0, '</td></tr></table>'
361 yield 0, '</td></tr></table>'
362
362
363
363
364 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
364 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
365 def __init__(self, **kw):
365 def __init__(self, **kw):
366 # only show these line numbers if set
366 # only show these line numbers if set
367 self.only_lines = kw.pop('only_line_numbers', [])
367 self.only_lines = kw.pop('only_line_numbers', [])
368 self.query_terms = kw.pop('query_terms', [])
368 self.query_terms = kw.pop('query_terms', [])
369 self.max_lines = kw.pop('max_lines', 5)
369 self.max_lines = kw.pop('max_lines', 5)
370 self.line_context = kw.pop('line_context', 3)
370 self.line_context = kw.pop('line_context', 3)
371 self.url = kw.pop('url', None)
371 self.url = kw.pop('url', None)
372
372
373 super(CodeHtmlFormatter, self).__init__(**kw)
373 super(CodeHtmlFormatter, self).__init__(**kw)
374
374
375 def _wrap_code(self, source):
375 def _wrap_code(self, source):
376 for cnt, it in enumerate(source):
376 for cnt, it in enumerate(source):
377 i, t = it
377 i, t = it
378 t = '<pre>%s</pre>' % t
378 t = '<pre>%s</pre>' % t
379 yield i, t
379 yield i, t
380
380
381 def _wrap_tablelinenos(self, inner):
381 def _wrap_tablelinenos(self, inner):
382 yield 0, '<table class="code-highlight %stable">' % self.cssclass
382 yield 0, '<table class="code-highlight %stable">' % self.cssclass
383
383
384 last_shown_line_number = 0
384 last_shown_line_number = 0
385 current_line_number = 1
385 current_line_number = 1
386
386
387 for t, line in inner:
387 for t, line in inner:
388 if not t:
388 if not t:
389 yield t, line
389 yield t, line
390 continue
390 continue
391
391
392 if current_line_number in self.only_lines:
392 if current_line_number in self.only_lines:
393 if last_shown_line_number + 1 != current_line_number:
393 if last_shown_line_number + 1 != current_line_number:
394 yield 0, '<tr>'
394 yield 0, '<tr>'
395 yield 0, '<td class="line">...</td>'
395 yield 0, '<td class="line">...</td>'
396 yield 0, '<td id="hlcode" class="code"></td>'
396 yield 0, '<td id="hlcode" class="code"></td>'
397 yield 0, '</tr>'
397 yield 0, '</tr>'
398
398
399 yield 0, '<tr>'
399 yield 0, '<tr>'
400 if self.url:
400 if self.url:
401 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
401 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
402 self.url, current_line_number, current_line_number)
402 self.url, current_line_number, current_line_number)
403 else:
403 else:
404 yield 0, '<td class="line"><a href="">%i</a></td>' % (
404 yield 0, '<td class="line"><a href="">%i</a></td>' % (
405 current_line_number)
405 current_line_number)
406 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
406 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
407 yield 0, '</tr>'
407 yield 0, '</tr>'
408
408
409 last_shown_line_number = current_line_number
409 last_shown_line_number = current_line_number
410
410
411 current_line_number += 1
411 current_line_number += 1
412
412
413
413
414 yield 0, '</table>'
414 yield 0, '</table>'
415
415
416
416
417 def extract_phrases(text_query):
417 def extract_phrases(text_query):
418 """
418 """
419 Extracts phrases from a search term string, making sure phrases
419 Extracts phrases from a search term string, making sure phrases
420 contained in double quotes are kept together, and discarding empty
420 contained in double quotes are kept together, and discarding empty
421 or whitespace-only values, eg.
421 or whitespace-only values, eg.
422
422
423 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
423 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
424
424
425 """
425 """
426
426
427 in_phrase = False
427 in_phrase = False
428 buf = ''
428 buf = ''
429 phrases = []
429 phrases = []
430 for char in text_query:
430 for char in text_query:
431 if in_phrase:
431 if in_phrase:
432 if char == '"': # end phrase
432 if char == '"': # end phrase
433 phrases.append(buf)
433 phrases.append(buf)
434 buf = ''
434 buf = ''
435 in_phrase = False
435 in_phrase = False
436 continue
436 continue
437 else:
437 else:
438 buf += char
438 buf += char
439 continue
439 continue
440 else:
440 else:
441 if char == '"': # start phrase
441 if char == '"': # start phrase
442 in_phrase = True
442 in_phrase = True
443 phrases.append(buf)
443 phrases.append(buf)
444 buf = ''
444 buf = ''
445 continue
445 continue
446 elif char == ' ':
446 elif char == ' ':
447 phrases.append(buf)
447 phrases.append(buf)
448 buf = ''
448 buf = ''
449 continue
449 continue
450 else:
450 else:
451 buf += char
451 buf += char
452
452
453 phrases.append(buf)
453 phrases.append(buf)
454 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
454 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
455 return phrases
455 return phrases
456
456
457
457
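# A quick doctest-style illustration of extract_phrases() (inputs are made up;
# the first pair simply restates the docstring example above):
#
#   >>> extract_phrases('some text "a phrase" more')
#   ['some', 'text', 'a phrase', 'more']
#   >>> extract_phrases('  "only phrase"  ')
#   ['only phrase']
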
def get_matching_offsets(text, phrases):
    """
    Returns a list of string offsets in `text` that the list of `phrases` match

    >>> get_matching_offsets('some text here', ['some', 'here'])
    [(0, 4), (10, 14)]

    """
    offsets = []
    for phrase in phrases:
        for match in re.finditer(phrase, text):
            offsets.append((match.start(), match.end()))

    return offsets


def normalize_text_for_matching(x):
    """
    Replaces all non-alphanumeric characters with spaces and lowercases the
    string, useful for comparing two text strings without punctuation
    """
    return re.sub(r'[^\w]', ' ', x.lower())


def get_matching_line_offsets(lines, terms):
    """ Return a set of `lines` indices (starting from 1) matching a
    text search query, along with `context` lines above/below matching lines

    :param lines: list of strings representing lines
    :param terms: search term string to match in lines eg. 'some text'
    :param context: number of lines above/below a matching line to add to result
    :param max_lines: cut off for lines of interest
    eg.

    text = '''
    words words words
    words words words
    some text some
    words words words
    words words words
    text here what
    '''
    get_matching_line_offsets(text, 'text', context=1)
    {3: [(5, 9)], 6: [(0, 4)]}

    """
    matching_lines = {}
    phrases = [normalize_text_for_matching(phrase)
               for phrase in extract_phrases(terms)]

    for line_index, line in enumerate(lines, start=1):
        match_offsets = get_matching_offsets(
            normalize_text_for_matching(line), phrases)
        if match_offsets:
            matching_lines[line_index] = match_offsets

    return matching_lines

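# Worked illustration of the return shape of get_matching_line_offsets()
# (made-up input; offsets are computed on the normalized text, so punctuation
# counts as spaces):
#
#   >>> get_matching_line_offsets(
#   ...     ['words words', 'some text some', 'words', 'text here'], 'text')
#   {2: [(5, 9)], 4: [(0, 4)]}
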
def hsv_to_rgb(h, s, v):
    """ Convert hsv color values to rgb """

    if s == 0.0:
        return v, v, v
    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    i = i % 6
    if i == 0:
        return v, t, p
    if i == 1:
        return q, v, p
    if i == 2:
        return p, v, t
    if i == 3:
        return p, q, v
    if i == 4:
        return t, p, v
    if i == 5:
        return v, p, q

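# Sanity check of the conversion above (pure hue values, illustrative only):
#
#   >>> hsv_to_rgb(0.0, 1.0, 1.0)  # pure red
#   (1.0, 0.0, 0.0)
#   >>> hsv_to_rgb(0.5, 1.0, 1.0)  # pure cyan
#   (0.0, 1.0, 1.0)
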
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator yielding n evenly distributed colors using the hsv color
    space and the golden ratio. It always yields colors in the same order.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: RGB tuple
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374

    for _ in xrange(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        yield map(lambda x: str(int(x * 256)), RGB_tuple)

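# The sequence is fully deterministic: two independent generators produce the
# same colors in the same order, which is what makes color_hasher() below
# stable across requests (illustrative doctest, Python 2 semantics):
#
#   >>> first = unique_color_generator()
#   >>> second = unique_color_generator()
#   >>> first.next() == second.next()
#   True
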
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which, when called with an argument, returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        if thing in color_dict:
            col = color_dict[thing]
        else:
            col = color_dict[thing] = cgenerator.next()
        return "rgb(%s)" % (', '.join(col))

    return get_color_string


def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found
    """
    lexer = None
    try:
        if mimetype:
            lexer = get_lexer_for_mimetype(mimetype)
        if not lexer:
            lexer = get_lexer_for_filename(filepath)
    except pygments.util.ClassNotFound:
        pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer

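# Fallback behaviour in practice (illustrative; assumes the pygments lexer
# helpers referenced above are importable):
#
#   >>> get_lexer_safe(mimetype='text/x-python').name
#   'Python'
#   >>> get_lexer_safe(filepath='unknown.extension-xyz').name
#   'Text only'
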
def get_lexer_for_filenode(filenode):
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    return lexer


def pygmentize(filenode, **kwargs):
    """
    pygmentize function using pygments

    :param filenode:
    """
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer,
                                  CodeHtmlFormatter(**kwargs)))


def is_following_repo(repo_name, user_id):
    from rhodecode.model.scm import ScmModel
    return ScmModel().is_following_repo(repo_name, user_id)


class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message):
        self.category = category
        self.message = message

    def __str__(self):
        return self.message

    __unicode__ = __str__

    def __html__(self):
        return escape(safe_unicode(self.message))


class Flash(_Flash):

    def pop_messages(self):
        """Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        from pylons import session

        messages = []

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                messages.append(_Message(cat, msg))
        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self):
        payloads = []
        messages = flash.pop_messages()
        if messages:
            for message in messages:
                subdata = {}
                if hasattr(message.message, 'rsplit'):
                    flash_data = message.message.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        subdata = json.loads(flash_data[1])
                else:
                    org_message = message.message
                payloads.append({
                    'message': {
                        'message': u'{}'.format(org_message),
                        'level': message.category,
                        'force': True,
                        'subdata': subdata
                    }
                })
        return json.dumps(payloads)

flash = Flash()

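# How the '|DELIM|' convention above plays out (illustrative only; the payload
# shape mirrors json_alerts()):
#
#   flash(u'Stored successfully|DELIM|{"reason": "ok"}', category='success')
#
# would later be serialized by json_alerts() roughly as:
#
#   [{"message": {"message": "Stored successfully", "level": "success",
#                 "force": true, "subdata": {"reason": "ok"}}}]
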
#==============================================================================
# SCM FILTERS available via h.
#==============================================================================
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.utils2 import credentials_filter, age as _age
from rhodecode.model.db import User, ChangesetStatus

age = _age
capitalize = lambda x: x.capitalize()
email = author_email
short_id = lambda x: x[:12]
hide_credentials = lambda x: ''.join(credentials_filter(x))

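# Tiny illustrations of the helpers above (made-up input values):
#
#   >>> short_id('1cce4ff2aabbccddeeff00112233445566778899')
#   '1cce4ff2aabb'
#   >>> capitalize('rhodecode')
#   'Rhodecode'
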
def age_component(datetime_iso, value=None, time_is_local=False):
    title = value or format_date(datetime_iso)
    tzinfo = '+00:00'

    # detect if we have a timezone info, otherwise, add it
    if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        if time_is_local:
            tzinfo = time.strftime("+%H:%M",
                time.gmtime(
                    (datetime.now() - datetime.utcnow()).seconds + 1
                )
            )

    return literal(
        '<time class="timeago tooltip" '
        'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, title, tzinfo))

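# Example of the markup produced above (a string datetime keeps the default
# '+00:00' suffix; values are made up, output wrapped here for readability):
#
#   age_component('2017-01-01T10:00:00', value='3 months ago')
#
# returns:
#
#   <time class="timeago tooltip" title="3 months ago+00:00"
#         datetime="2017-01-01T10:00:00+00:00">3 months ago</time>
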
def _shorten_commit_id(commit_id):
    from rhodecode import CONFIG
    def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
    return commit_id[:def_len]


def show_id(commit):
    """
    Configurable function that shows ID,
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    from rhodecode import CONFIG
    show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))

    raw_id = _shorten_commit_id(commit.raw_id)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    else:
        return '%s' % (raw_id, )


def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """

    if date:
        _fmt = "%a, %d %b %Y %H:%M:%S"
        return safe_unicode(date.strftime(_fmt))

    return u""

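# Illustration of the fixed format used above:
#
#   >>> from datetime import datetime
#   >>> format_date(datetime(2017, 6, 21, 13, 5, 0))
#   u'Wed, 21 Jun 2017 13:05:00'
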
class _RepoChecker(object):

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        if hasattr(repository, 'alias'):
            _type = repository.alias
        elif hasattr(repository, 'repo_type'):
            _type = repository.repo_type
        else:
            _type = repository
        return _type == self._backend_alias

is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')

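# The checkers accept a repo object (anything with .alias or .repo_type) or a
# plain backend string:
#
#   >>> is_git('git'), is_hg('git'), is_svn('svn')
#   (True, False, True)
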
def get_repo_type_by_name(repo_name):
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type


def is_svn_without_proxy(repository):
    if is_svn(repository):
        from rhodecode.model.settings import VcsSettingsModel
        conf = VcsSettingsModel().get_ui_settings_as_config_obj()
        return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return False


def discover_user(author):
    """
    Tries to discover RhodeCode User based on the author string. Author string
    is typically `FirstName LastName <email@address.com>`
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # Valid email in the attribute passed, see if they're in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # Maybe it's a username, we try to extract it and fetch by username ?
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None


def email_or_none(author):
    # extract email from the commit string
    _email = author_email(author)

    # If we have an email, use it, otherwise
    # see if it contains a username we can get an email from
    if _email != '':
        return _email
    else:
        user = User.get_by_username(
            author_name(author), case_insensitive=True, cache=True)

        if user is not None:
            return user.email

    # No valid email, not a valid user in the system, none!
    return None


def link_to_user(author, length=0, **kwargs):
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save 1 intensive-query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if user:
        return link_to(
            escape(display_person),
            route_path('user_profile', username=user.username),
            **kwargs)
    else:
        return escape(display_person)


def person(author, show_attr="username_and_name"):
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    else:
        _author = author_name(author)
        _email = email(author)
        return _author or _email


def author_string(email):
    if email:
        user = User.get_by_email(email, case_insensitive=True, cache=True)
        if user:
            if user.firstname or user.lastname:
                return '%s %s &lt;%s&gt;' % (
                    escape(user.firstname), escape(user.lastname), email)
            else:
                return email
        else:
            return email
    else:
        return None


def person_by_id(id_, show_attr="username_and_name"):
    # attr to return from fetched user
    person_getter = lambda usr: getattr(usr, show_attr)

    # maybe it's an ID?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_


def gravatar_with_user(author, show_disabled=False):
    from rhodecode.lib.utils import PartialRenderer
    _render = PartialRenderer('base/base.mako')
    return _render('gravatar_with_user', author, show_disabled=show_disabled)


def desc_stylize(value):
    """
    converts tags from value into html equivalent

    :param value:
    """
    if not value:
        return ''

    value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value


def escaped_stylize(value):
    """
    converts tags from value into html equivalent, but escaping its value first
    """
    if not value:
        return ''

    # Using default webhelper escape method, but has to force it as a
    # plain unicode instead of a markup tag to be used in regex expressions
    value = unicode(escape(safe_unicode(value)))

    value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value

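# Illustration of the metatag markup handled above (made-up description):
#
#   >>> desc_stylize('[lang => python] see our docs')
#   '<div class="metatag" tag="lang">python</div> see our docs'
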
def bool2icon(value):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    """

    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true")
    else:  # not true as bool
        return HTML.tag('i', class_="icon-false")


#==============================================================================
# PERMS
#==============================================================================
from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
    HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
    HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
    csrf_token_key


#==============================================================================
# GRAVATAR URL
#==============================================================================
class InitialsGravatar(object):
    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints representing rgb color space
        """
        return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (modulo its length so we always find it in the
        # bank even if it's smaller than 256 values)
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, in case the
        full name is compound, like Guido Von Rossum, we use last part of the
        last name (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to their ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'firstname.lastname' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either firstname or lastname
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

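    # Initials extraction, illustrated (addresses and names are made up and
    # mirror the docstring above):
    #
    #   >>> InitialsGravatar('john.doe@example.com', '', '').get_initials()
    #   'JD'
    #   >>> InitialsGravatar('guido@example.com', 'Guido', 'Von Rossum').get_initials()
    #   'GR'
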
    def get_img_data_by_type(self, font_family, img_type):
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """

        font_family = ','.join([
            'proximanovaregular',
            'Proxima Nova Regular',
            'Proxima Nova',
            'Arial',
            'Lucida Grande',
            'sans-serif'
        ])
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
        width="{size}" height="{size}"
        style="width: 100%; height: 100%; background-color: {background}"
        viewBox="0 0 {size} {size}">
        <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
        pointer-events="auto" fill="{text_color}"
        font-family="{font_family}"
        style="font-weight: 400; font-size: {f_size}px;">{text}
        </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/1.85,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        img_data = self.get_img_data(svg_type)
        return "data:image/svg+xml;base64,%s" % img_data.encode('base64')


def initials_gravatar(email_address, first_name, last_name, size=30):
    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'
    klass = InitialsGravatar(email_address, first_name, last_name, size)
    return klass.generate_svg(svg_type=svg_type)


def gravatar_url(email_address, size=30):
    # doh, we need to re-import those to mock it later
    from pylons import tmpl_context as c

    _use_gravatar = c.visual.use_gravatar
    _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        # TODO: Disuse pyramid thread locals. Think about another solution to
        # get the host and schema here.
        request = get_current_request()
        tmpl = safe_str(_gravatar_url)
        tmpl = tmpl.replace('{email}', email_address)\
            .replace('{md5email}', md5_safe(email_address.lower())) \
            .replace('{netloc}', request.host)\
            .replace('{scheme}', request.scheme)\
            .replace('{size}', safe_str(size))
        return tmpl
    else:
        return initials_gravatar(email_address, '', '', size=size)

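# The gravatar_url template supports a handful of placeholders that are
# substituted above ({email}, {md5email}, {netloc}, {scheme}, {size}). For
# example, a template such as (example value, not necessarily the default)
#
#   https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}
#
# would resolve to a URL like
#
#   https://secure.gravatar.com/avatar/<md5-of-lowercased-email>?d=identicon&s=30
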
class Page(_Page):
    """
    Custom pager to match rendering style with paginator
    """

    def _get_pos(self, cur_page, max_page, items):
        edge = (items / 2) + 1
        if (cur_page <= edge):
            radius = max(items / 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items / 2

        left = max(1, (cur_page - (radius)))
        right = min(max_page, cur_page + (radius))
        return left, cur_page, right

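    # _get_pos() windowing, illustrated with plain numbers (no pager state is
    # needed for the arithmetic): for cur_page=7, max_page=12 and items=7, the
    # method returns (4, 7, 10), i.e. pages 4..10 are rendered around page 7.
    # _range() below passes items=(radius * 2) + 1 to build that window.
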
1300 def _range(self, regexp_match):
1300 def _range(self, regexp_match):
1301 """
1301 """
1302 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1302 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1303
1303
1304 Arguments:
1304 Arguments:
1305
1305
1306 regexp_match
1306 regexp_match
1307 A "re" (regular expressions) match object containing the
1307 A "re" (regular expressions) match object containing the
1308 radius of linked pages around the current page in
1308 radius of linked pages around the current page in
1309 regexp_match.group(1) as a string
1309 regexp_match.group(1) as a string
1310
1310
1311 This function is supposed to be called as a callable in
1311 This function is supposed to be called as a callable in
1312 re.sub.
1312 re.sub.
1313
1313
1314 """
1314 """
1315 radius = int(regexp_match.group(1))
1315 radius = int(regexp_match.group(1))
1316
1316
1317 # Compute the first and last page number within the radius
1317 # Compute the first and last page number within the radius
1318 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1318 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1319 # -> leftmost_page = 5
1319 # -> leftmost_page = 5
1320 # -> rightmost_page = 9
1320 # -> rightmost_page = 9
1321 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1321 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1322 self.last_page,
1322 self.last_page,
1323 (radius * 2) + 1)
1323 (radius * 2) + 1)
1324 nav_items = []
1324 nav_items = []
1325
1325
1326 # Create a link to the first page (unless we are on the first page
1326 # Create a link to the first page (unless we are on the first page
1327 # or there would be no need to insert '..' spacers)
1327 # or there would be no need to insert '..' spacers)
1328 if self.page != self.first_page and self.first_page < leftmost_page:
1328 if self.page != self.first_page and self.first_page < leftmost_page:
1329 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1329 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1330
1330
1331 # Insert dots if there are pages between the first page
1331 # Insert dots if there are pages between the first page
1332 # and the currently displayed page range
1332 # and the currently displayed page range
1333 if leftmost_page - self.first_page > 1:
1333 if leftmost_page - self.first_page > 1:
1334 # Wrap in a SPAN tag if nolink_attr is set
1334 # Wrap in a SPAN tag if nolink_attr is set
1335 text = '..'
1335 text = '..'
1336 if self.dotdot_attr:
1336 if self.dotdot_attr:
1337 text = HTML.span(c=text, **self.dotdot_attr)
1337 text = HTML.span(c=text, **self.dotdot_attr)
1338 nav_items.append(text)
1338 nav_items.append(text)
1339
1339
1340 for thispage in xrange(leftmost_page, rightmost_page + 1):
1340 for thispage in xrange(leftmost_page, rightmost_page + 1):
1341 # Highlight the current page number and do not use a link
1341 # Highlight the current page number and do not use a link
1342 if thispage == self.page:
1342 if thispage == self.page:
1343 text = '%s' % (thispage,)
1343 text = '%s' % (thispage,)
1344 # Wrap in a SPAN tag if nolink_attr is set
1344 # Wrap in a SPAN tag if nolink_attr is set
1345 if self.curpage_attr:
1345 if self.curpage_attr:
1346 text = HTML.span(c=text, **self.curpage_attr)
1346 text = HTML.span(c=text, **self.curpage_attr)
1347 nav_items.append(text)
1347 nav_items.append(text)
1348 # Otherwise create just a link to that page
1348 # Otherwise create just a link to that page
1349 else:
1349 else:
1350 text = '%s' % (thispage,)
1350 text = '%s' % (thispage,)
1351 nav_items.append(self._pagerlink(thispage, text))
1351 nav_items.append(self._pagerlink(thispage, text))
1352
1352
1353 # Insert dots if there are pages between the displayed
1353 # Insert dots if there are pages between the displayed
1354 # page numbers and the end of the page range
1354 # page numbers and the end of the page range
1355 if self.last_page - rightmost_page > 1:
1355 if self.last_page - rightmost_page > 1:
1356 text = '..'
1356 text = '..'
1357 # Wrap in a SPAN tag if nolink_attr is set
1357 # Wrap in a SPAN tag if nolink_attr is set
1358 if self.dotdot_attr:
1358 if self.dotdot_attr:
1359 text = HTML.span(c=text, **self.dotdot_attr)
1359 text = HTML.span(c=text, **self.dotdot_attr)
1360 nav_items.append(text)
1360 nav_items.append(text)
1361
1361
1362 # Create a link to the very last page (unless we are on the last
1362 # Create a link to the very last page (unless we are on the last
1363 # page or there would be no need to insert '..' spacers)
1363 # page or there would be no need to insert '..' spacers)
1364 if self.page != self.last_page and rightmost_page < self.last_page:
1364 if self.page != self.last_page and rightmost_page < self.last_page:
1365 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1365 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1366
1366
1367 ## prerender links
1367 ## prerender links
1368 #_page_link = url.current()
1368 #_page_link = url.current()
1369 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1369 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1370 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1370 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1371 return self.separator.join(nav_items)
1371 return self.separator.join(nav_items)
1372
1372
1373 def pager(self, format='~2~', page_param='page', partial_param='partial',
1373 def pager(self, format='~2~', page_param='page', partial_param='partial',
1374 show_if_single_page=False, separator=' ', onclick=None,
1374 show_if_single_page=False, separator=' ', onclick=None,
1375 symbol_first='<<', symbol_last='>>',
1375 symbol_first='<<', symbol_last='>>',
1376 symbol_previous='<', symbol_next='>',
1376 symbol_previous='<', symbol_next='>',
1377 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1377 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1378 curpage_attr={'class': 'pager_curpage'},
1378 curpage_attr={'class': 'pager_curpage'},
1379 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1379 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1380
1380
1381 self.curpage_attr = curpage_attr
1381 self.curpage_attr = curpage_attr
1382 self.separator = separator
1382 self.separator = separator
1383 self.pager_kwargs = kwargs
1383 self.pager_kwargs = kwargs
1384 self.page_param = page_param
1384 self.page_param = page_param
1385 self.partial_param = partial_param
1385 self.partial_param = partial_param
1386 self.onclick = onclick
1386 self.onclick = onclick
1387 self.link_attr = link_attr
1387 self.link_attr = link_attr
1388 self.dotdot_attr = dotdot_attr
1388 self.dotdot_attr = dotdot_attr
1389
1389
1390 # Don't show navigator if there is no more than one page
1390 # Don't show navigator if there is no more than one page
1391 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1391 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1392 return ''
1392 return ''
1393
1393
1394 from string import Template
1394 from string import Template
1395 # Replace ~...~ in token format by range of pages
1395 # Replace ~...~ in token format by range of pages
1396 result = re.sub(r'~(\d+)~', self._range, format)
1396 result = re.sub(r'~(\d+)~', self._range, format)
1397
1397
1398 # Interpolate '%' variables
1398 # Interpolate '%' variables
1399 result = Template(result).safe_substitute({
1399 result = Template(result).safe_substitute({
1400 'first_page': self.first_page,
1400 'first_page': self.first_page,
1401 'last_page': self.last_page,
1401 'last_page': self.last_page,
1402 'page': self.page,
1402 'page': self.page,
1403 'page_count': self.page_count,
1403 'page_count': self.page_count,
1404 'items_per_page': self.items_per_page,
1404 'items_per_page': self.items_per_page,
1405 'first_item': self.first_item,
1405 'first_item': self.first_item,
1406 'last_item': self.last_item,
1406 'last_item': self.last_item,
1407 'item_count': self.item_count,
1407 'item_count': self.item_count,
1408 'link_first': self.page > self.first_page and \
1408 'link_first': self.page > self.first_page and \
1409 self._pagerlink(self.first_page, symbol_first) or '',
1409 self._pagerlink(self.first_page, symbol_first) or '',
1410 'link_last': self.page < self.last_page and \
1410 'link_last': self.page < self.last_page and \
1411 self._pagerlink(self.last_page, symbol_last) or '',
1411 self._pagerlink(self.last_page, symbol_last) or '',
1412 'link_previous': self.previous_page and \
1412 'link_previous': self.previous_page and \
1413 self._pagerlink(self.previous_page, symbol_previous) \
1413 self._pagerlink(self.previous_page, symbol_previous) \
1414 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1414 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1415 'link_next': self.next_page and \
1415 'link_next': self.next_page and \
1416 self._pagerlink(self.next_page, symbol_next) \
1416 self._pagerlink(self.next_page, symbol_next) \
1417 or HTML.span(symbol_next, class_="pg-next disabled")
1417 or HTML.span(symbol_next, class_="pg-next disabled")
1418 })
1418 })
1419
1419
1420 return literal(result)
1420 return literal(result)
1421
1421
1422
1422
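Editor's note: pager() consumes its format string in two passes: '~N~' is expanded to the numbered page range via _range(), and the remaining '$'-style tokens are filled by string.Template. A small self-contained sketch of that interplay (fake_range is a stand-in, not the real _range):

import re
from string import Template

fmt = '$link_previous ~2~ $link_next'   # illustrative format argument

def fake_range(match):
    # stand-in for Page._range: the radius comes from the ~N~ token
    radius = int(match.group(1))
    return '(page links, radius=%d)' % radius

result = re.sub(r'~(\d+)~', fake_range, fmt)
result = Template(result).safe_substitute({
    'link_previous': '<',
    'link_next': '>',
})
print(result)   # "< (page links, radius=2) >"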
1423 #==============================================================================
1423 #==============================================================================
1424 # REPO PAGER, PAGER FOR REPOSITORY
1424 # REPO PAGER, PAGER FOR REPOSITORY
1425 #==============================================================================
1425 #==============================================================================
1426 class RepoPage(Page):
1426 class RepoPage(Page):
1427
1427
1428 def __init__(self, collection, page=1, items_per_page=20,
1428 def __init__(self, collection, page=1, items_per_page=20,
1429 item_count=None, url=None, **kwargs):
1429 item_count=None, url=None, **kwargs):
1430
1430
1431 """Create a "RepoPage" instance. special pager for paging
1431 """Create a "RepoPage" instance. special pager for paging
1432 repository
1432 repository
1433 """
1433 """
1434 self._url_generator = url
1434 self._url_generator = url
1435
1435
1436 # Save the kwargs class-wide so they can be used in the pager() method
1436 # Save the kwargs class-wide so they can be used in the pager() method
1437 self.kwargs = kwargs
1437 self.kwargs = kwargs
1438
1438
1439 # Save a reference to the collection
1439 # Save a reference to the collection
1440 self.original_collection = collection
1440 self.original_collection = collection
1441
1441
1442 self.collection = collection
1442 self.collection = collection
1443
1443
1444 # The self.page is the number of the current page.
1444 # The self.page is the number of the current page.
1445 # The first page has the number 1!
1445 # The first page has the number 1!
1446 try:
1446 try:
1447 self.page = int(page) # make it int() if we get it as a string
1447 self.page = int(page) # make it int() if we get it as a string
1448 except (ValueError, TypeError):
1448 except (ValueError, TypeError):
1449 self.page = 1
1449 self.page = 1
1450
1450
1451 self.items_per_page = items_per_page
1451 self.items_per_page = items_per_page
1452
1452
1453 # Unless the user tells us how many items the collection has
1453 # Unless the user tells us how many items the collection has
1454 # we calculate that ourselves.
1454 # we calculate that ourselves.
1455 if item_count is not None:
1455 if item_count is not None:
1456 self.item_count = item_count
1456 self.item_count = item_count
1457 else:
1457 else:
1458 self.item_count = len(self.collection)
1458 self.item_count = len(self.collection)
1459
1459
1460 # Compute the number of the first and last available page
1460 # Compute the number of the first and last available page
1461 if self.item_count > 0:
1461 if self.item_count > 0:
1462 self.first_page = 1
1462 self.first_page = 1
1463 self.page_count = int(math.ceil(float(self.item_count) /
1463 self.page_count = int(math.ceil(float(self.item_count) /
1464 self.items_per_page))
1464 self.items_per_page))
1465 self.last_page = self.first_page + self.page_count - 1
1465 self.last_page = self.first_page + self.page_count - 1
1466
1466
1467 # Make sure that the requested page number is in the range of
1467 # Make sure that the requested page number is in the range of
1468 # valid pages
1468 # valid pages
1469 if self.page > self.last_page:
1469 if self.page > self.last_page:
1470 self.page = self.last_page
1470 self.page = self.last_page
1471 elif self.page < self.first_page:
1471 elif self.page < self.first_page:
1472 self.page = self.first_page
1472 self.page = self.first_page
1473
1473
1474 # Note: the number of items on this page can be less than
1474 # Note: the number of items on this page can be less than
1475 # items_per_page if the last page is not full
1475 # items_per_page if the last page is not full
1476 self.first_item = max(0, (self.item_count) - (self.page *
1476 self.first_item = max(0, (self.item_count) - (self.page *
1477 items_per_page))
1477 items_per_page))
1478 self.last_item = ((self.item_count - 1) - items_per_page *
1478 self.last_item = ((self.item_count - 1) - items_per_page *
1479 (self.page - 1))
1479 (self.page - 1))
1480
1480
1481 self.items = list(self.collection[self.first_item:self.last_item + 1])
1481 self.items = list(self.collection[self.first_item:self.last_item + 1])
1482
1482
1483 # Links to previous and next page
1483 # Links to previous and next page
1484 if self.page > self.first_page:
1484 if self.page > self.first_page:
1485 self.previous_page = self.page - 1
1485 self.previous_page = self.page - 1
1486 else:
1486 else:
1487 self.previous_page = None
1487 self.previous_page = None
1488
1488
1489 if self.page < self.last_page:
1489 if self.page < self.last_page:
1490 self.next_page = self.page + 1
1490 self.next_page = self.page + 1
1491 else:
1491 else:
1492 self.next_page = None
1492 self.next_page = None
1493
1493
1494 # No items available
1494 # No items available
1495 else:
1495 else:
1496 self.first_page = None
1496 self.first_page = None
1497 self.page_count = 0
1497 self.page_count = 0
1498 self.last_page = None
1498 self.last_page = None
1499 self.first_item = None
1499 self.first_item = None
1500 self.last_item = None
1500 self.last_item = None
1501 self.previous_page = None
1501 self.previous_page = None
1502 self.next_page = None
1502 self.next_page = None
1503 self.items = []
1503 self.items = []
1504
1504
1505 # This is a subclass of the 'list' type. Initialise the list now.
1505 # This is a subclass of the 'list' type. Initialise the list now.
1506 list.__init__(self, reversed(self.items))
1506 list.__init__(self, reversed(self.items))
1507
1507
1508
1508
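Editor's note: RepoPage pages the collection from the newest end; first_item/last_item index from the tail and the slice is reversed. A worked example with assumed numbers (45 commits, 20 per page):

import math

collection = list(range(45))    # stand-in for a commit collection (oldest..newest)
item_count, items_per_page = len(collection), 20
page_count = int(math.ceil(float(item_count) / items_per_page))   # -> 3 pages

for page in (1, 3):
    first_item = max(0, item_count - page * items_per_page)
    last_item = (item_count - 1) - items_per_page * (page - 1)
    items = list(reversed(collection[first_item:last_item + 1]))
    print(page, first_item, last_item, len(items))
# page 1 -> slice [25:45], reversed: the 20 newest items, newest first
# page 3 -> slice [0:5], reversed: the 5 oldest items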
1509 def changed_tooltip(nodes):
1509 def changed_tooltip(nodes):
1510 """
1510 """
1511 Generates an HTML string for changed nodes on the commit page.
1511 Generates an HTML string for changed nodes on the commit page.
1512 It limits the output to 30 entries
1512 It limits the output to 30 entries
1513
1513
1514 :param nodes: LazyNodesGenerator
1514 :param nodes: LazyNodesGenerator
1515 """
1515 """
1516 if nodes:
1516 if nodes:
1517 pref = ': <br/> '
1517 pref = ': <br/> '
1518 suf = ''
1518 suf = ''
1519 if len(nodes) > 30:
1519 if len(nodes) > 30:
1520 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1520 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1521 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1521 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1522 for x in nodes[:30]]) + suf)
1522 for x in nodes[:30]]) + suf)
1523 else:
1523 else:
1524 return ': ' + _('No Files')
1524 return ': ' + _('No Files')
1525
1525
1526
1526
1527 def breadcrumb_repo_link(repo):
1527 def breadcrumb_repo_link(repo):
1528 """
1528 """
1529 Makes a breadcrumbs path link to repo
1529 Makes a breadcrumbs path link to repo
1530
1530
1531 ex::
1531 ex::
1532 group >> subgroup >> repo
1532 group >> subgroup >> repo
1533
1533
1534 :param repo: a Repository instance
1534 :param repo: a Repository instance
1535 """
1535 """
1536
1536
1537 path = [
1537 path = [
1538 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1538 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1539 for group in repo.groups_with_parents
1539 for group in repo.groups_with_parents
1540 ] + [
1540 ] + [
1541 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1541 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1542 ]
1542 ]
1543
1543
1544 return literal(' &raquo; '.join(path))
1544 return literal(' &raquo; '.join(path))
1545
1545
1546
1546
1547 def format_byte_size_binary(file_size):
1547 def format_byte_size_binary(file_size):
1548 """
1548 """
1549 Formats file/folder sizes using binary (1024-based) units.
1549 Formats file/folder sizes using binary (1024-based) units.
1550 """
1550 """
1551 formatted_size = format_byte_size(file_size, binary=True)
1551 formatted_size = format_byte_size(file_size, binary=True)
1552 return formatted_size
1552 return formatted_size
1553
1553
1554
1554
1555 def urlify_text(text_, safe=True):
1555 def urlify_text(text_, safe=True):
1556 """
1556 """
1557 Extract URLs from the text and make HTML links out of them
1557 Extract URLs from the text and make HTML links out of them
1558
1558
1559 :param text_:
1559 :param text_:
1560 """
1560 """
1561
1561
1562 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1562 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1563 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1563 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1564
1564
1565 def url_func(match_obj):
1565 def url_func(match_obj):
1566 url_full = match_obj.groups()[0]
1566 url_full = match_obj.groups()[0]
1567 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1567 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1568 _newtext = url_pat.sub(url_func, text_)
1568 _newtext = url_pat.sub(url_func, text_)
1569 if safe:
1569 if safe:
1570 return literal(_newtext)
1570 return literal(_newtext)
1571 return _newtext
1571 return _newtext
1572
1572
1573
1573
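Editor's note: a quick standalone check of the URL pattern and substitution used by urlify_text() (the input string is made up):

import re

url_pat = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'
                     r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')

text = 'see https://example.com/docs for details'
html = url_pat.sub(
    lambda m: '<a href="%(url)s">%(url)s</a>' % {'url': m.group(1)}, text)
print(html)
# see <a href="https://example.com/docs">https://example.com/docs</a> for details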
1574 def urlify_commits(text_, repository):
1574 def urlify_commits(text_, repository):
1575 """
1575 """
1576 Extract commit ids from text and make links from them
1576 Extract commit ids from text and make links from them
1577
1577
1578 :param text_:
1578 :param text_:
1579 :param repository: repo name to build the URL with
1579 :param repository: repo name to build the URL with
1580 """
1580 """
1581 from pylons import url # doh, we need to re-import url to mock it later
1581 from pylons import url # doh, we need to re-import url to mock it later
1582 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1582 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1583
1583
1584 def url_func(match_obj):
1584 def url_func(match_obj):
1585 commit_id = match_obj.groups()[1]
1585 commit_id = match_obj.groups()[1]
1586 pref = match_obj.groups()[0]
1586 pref = match_obj.groups()[0]
1587 suf = match_obj.groups()[2]
1587 suf = match_obj.groups()[2]
1588
1588
1589 tmpl = (
1589 tmpl = (
1590 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1590 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1591 '%(commit_id)s</a>%(suf)s'
1591 '%(commit_id)s</a>%(suf)s'
1592 )
1592 )
1593 return tmpl % {
1593 return tmpl % {
1594 'pref': pref,
1594 'pref': pref,
1595 'cls': 'revision-link',
1595 'cls': 'revision-link',
1596 'url': url('changeset_home', repo_name=repository,
1596 'url': url('changeset_home', repo_name=repository,
1597 revision=commit_id, qualified=True),
1597 revision=commit_id, qualified=True),
1598 'commit_id': commit_id,
1598 'commit_id': commit_id,
1599 'suf': suf
1599 'suf': suf
1600 }
1600 }
1601
1601
1602 newtext = URL_PAT.sub(url_func, text_)
1602 newtext = URL_PAT.sub(url_func, text_)
1603
1603
1604 return newtext
1604 return newtext
1605
1605
1606
1606
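Editor's note: urlify_commits() depends on the pylons url() generator; the sketch below swaps in a hard-coded URL shape (an assumption, not the routed one) just to show how the three capture groups are reassembled:

import re

COMMIT_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

def link_commit(match, repo_name='some-repo'):     # repo_name is illustrative
    pref, commit_id, suf = match.groups()
    href = '/%s/changeset/%s' % (repo_name, commit_id)   # assumed URL shape
    return '%s<a class="revision-link" href="%s">%s</a>%s' % (
        pref, href, commit_id, suf)

msg = 'fixes regression introduced in deadbeefcafe1234'
print(COMMIT_PAT.sub(link_commit, msg))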
1607 def _process_url_func(match_obj, repo_name, uid, entry,
1607 def _process_url_func(match_obj, repo_name, uid, entry,
1608 return_raw_data=False, link_format='html'):
1608 return_raw_data=False, link_format='html'):
1609 pref = ''
1609 pref = ''
1610 if match_obj.group().startswith(' '):
1610 if match_obj.group().startswith(' '):
1611 pref = ' '
1611 pref = ' '
1612
1612
1613 issue_id = ''.join(match_obj.groups())
1613 issue_id = ''.join(match_obj.groups())
1614
1614
1615 if link_format == 'html':
1615 if link_format == 'html':
1616 tmpl = (
1616 tmpl = (
1617 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1617 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1618 '%(issue-prefix)s%(id-repr)s'
1618 '%(issue-prefix)s%(id-repr)s'
1619 '</a>')
1619 '</a>')
1620 elif link_format == 'rst':
1620 elif link_format == 'rst':
1621 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1621 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1622 elif link_format == 'markdown':
1622 elif link_format == 'markdown':
1623 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1623 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1624 else:
1624 else:
1625 raise ValueError('Bad link_format:{}'.format(link_format))
1625 raise ValueError('Bad link_format:{}'.format(link_format))
1626
1626
1627 (repo_name_cleaned,
1627 (repo_name_cleaned,
1628 parent_group_name) = RepoGroupModel().\
1628 parent_group_name) = RepoGroupModel().\
1629 _get_group_name_and_parent(repo_name)
1629 _get_group_name_and_parent(repo_name)
1630
1630
1631 # variables replacement
1631 # variables replacement
1632 named_vars = {
1632 named_vars = {
1633 'id': issue_id,
1633 'id': issue_id,
1634 'repo': repo_name,
1634 'repo': repo_name,
1635 'repo_name': repo_name_cleaned,
1635 'repo_name': repo_name_cleaned,
1636 'group_name': parent_group_name
1636 'group_name': parent_group_name
1637 }
1637 }
1638 # named regex variables
1638 # named regex variables
1639 named_vars.update(match_obj.groupdict())
1639 named_vars.update(match_obj.groupdict())
1640 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1640 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1641
1641
1642 data = {
1642 data = {
1643 'pref': pref,
1643 'pref': pref,
1644 'cls': 'issue-tracker-link',
1644 'cls': 'issue-tracker-link',
1645 'url': _url,
1645 'url': _url,
1646 'id-repr': issue_id,
1646 'id-repr': issue_id,
1647 'issue-prefix': entry['pref'],
1647 'issue-prefix': entry['pref'],
1648 'serv': entry['url'],
1648 'serv': entry['url'],
1649 }
1649 }
1650 if return_raw_data:
1650 if return_raw_data:
1651 return {
1651 return {
1652 'id': issue_id,
1652 'id': issue_id,
1653 'url': _url
1653 'url': _url
1654 }
1654 }
1655 return tmpl % data
1655 return tmpl % data
1656
1656
1657
1657
1658 def process_patterns(text_string, repo_name, link_format='html'):
1658 def process_patterns(text_string, repo_name, link_format='html'):
1659 allowed_formats = ['html', 'rst', 'markdown']
1659 allowed_formats = ['html', 'rst', 'markdown']
1660 if link_format not in allowed_formats:
1660 if link_format not in allowed_formats:
1661 raise ValueError('Link format can be only one of:{} got {}'.format(
1661 raise ValueError('Link format can be only one of:{} got {}'.format(
1662 allowed_formats, link_format))
1662 allowed_formats, link_format))
1663
1663
1664 repo = None
1664 repo = None
1665 if repo_name:
1665 if repo_name:
1666 # Retrieve the repo so that an invalid repo_name does not blow up inside
1666 # Retrieve the repo so that an invalid repo_name does not blow up inside
1667 # IssueTrackerSettingsModel, while still passing the invalid name further down
1667 # IssueTrackerSettingsModel, while still passing the invalid name further down

1668 repo = Repository.get_by_repo_name(repo_name, cache=True)
1668 repo = Repository.get_by_repo_name(repo_name, cache=True)
1669
1669
1670 settings_model = IssueTrackerSettingsModel(repo=repo)
1670 settings_model = IssueTrackerSettingsModel(repo=repo)
1671 active_entries = settings_model.get_settings(cache=True)
1671 active_entries = settings_model.get_settings(cache=True)
1672
1672
1673 issues_data = []
1673 issues_data = []
1674 newtext = text_string
1674 newtext = text_string
1675
1675
1676 for uid, entry in active_entries.items():
1676 for uid, entry in active_entries.items():
1677 log.debug('found issue tracker entry with uid %s' % (uid,))
1677 log.debug('found issue tracker entry with uid %s' % (uid,))
1678
1678
1679 if not (entry['pat'] and entry['url']):
1679 if not (entry['pat'] and entry['url']):
1680 log.debug('skipping due to missing data')
1680 log.debug('skipping due to missing data')
1681 continue
1681 continue
1682
1682
1683 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1683 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1684 % (uid, entry['pat'], entry['url'], entry['pref']))
1684 % (uid, entry['pat'], entry['url'], entry['pref']))
1685
1685
1686 try:
1686 try:
1687 pattern = re.compile(r'%s' % entry['pat'])
1687 pattern = re.compile(r'%s' % entry['pat'])
1688 except re.error:
1688 except re.error:
1689 log.exception(
1689 log.exception(
1690 'issue tracker pattern: `%s` failed to compile',
1690 'issue tracker pattern: `%s` failed to compile',
1691 entry['pat'])
1691 entry['pat'])
1692 continue
1692 continue
1693
1693
1694 data_func = partial(
1694 data_func = partial(
1695 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1695 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1696 return_raw_data=True)
1696 return_raw_data=True)
1697
1697
1698 for match_obj in pattern.finditer(text_string):
1698 for match_obj in pattern.finditer(text_string):
1699 issues_data.append(data_func(match_obj))
1699 issues_data.append(data_func(match_obj))
1700
1700
1701 url_func = partial(
1701 url_func = partial(
1702 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1702 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1703 link_format=link_format)
1703 link_format=link_format)
1704
1704
1705 newtext = pattern.sub(url_func, newtext)
1705 newtext = pattern.sub(url_func, newtext)
1706 log.debug('processed prefix:uid `%s`' % (uid,))
1706 log.debug('processed prefix:uid `%s`' % (uid,))
1707
1707
1708 return newtext, issues_data
1708 return newtext, issues_data
1709
1709
1710
1710
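Editor's note: the active entries normally come from IssueTrackerSettingsModel; the self-contained sketch below uses one hand-written entry (pattern, URL template and prefix are all illustrative) to show how 'pat', 'url' and 'pref' interact through string.Template, mirroring _process_url_func():

import re
import string

entry = {
    'pat': r'#(?P<issue_id>\d+)',
    'url': 'https://tracker.example.com/${repo}/issue/${issue_id}',
    'pref': '#',
}
repo_name = 'some-repo'
text = 'closes #42'

pattern = re.compile(entry['pat'])

def to_link(match):
    named_vars = {'repo': repo_name}
    named_vars.update(match.groupdict())
    url = string.Template(entry['url']).safe_substitute(**named_vars)
    return '<a class="issue-tracker-link" href="%s">%s%s</a>' % (
        url, entry['pref'], ''.join(match.groups()))

print(pattern.sub(to_link, text))
# closes <a class="issue-tracker-link"
#         href="https://tracker.example.com/some-repo/issue/42">#42</a>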
1711 def urlify_commit_message(commit_text, repository=None):
1711 def urlify_commit_message(commit_text, repository=None):
1712 """
1712 """
1713 Parses the given text message and makes proper links.
1713 Parses the given text message and makes proper links.
1714 Issues are linked to the configured issue tracker, and the rest become commit links.
1714 Issues are linked to the configured issue tracker, and the rest become commit links.
1715
1715
1716 :param commit_text:
1716 :param commit_text:
1717 :param repository:
1717 :param repository:
1718 """
1718 """
1719 from pylons import url # doh, we need to re-import url to mock it later
1719 from pylons import url # doh, we need to re-import url to mock it later
1720
1720
1721 def escaper(string):
1721 def escaper(string):
1722 return string.replace('<', '&lt;').replace('>', '&gt;')
1722 return string.replace('<', '&lt;').replace('>', '&gt;')
1723
1723
1724 newtext = escaper(commit_text)
1724 newtext = escaper(commit_text)
1725
1725
1726 # extract http/https links and make them real urls
1726 # extract http/https links and make them real urls
1727 newtext = urlify_text(newtext, safe=False)
1727 newtext = urlify_text(newtext, safe=False)
1728
1728
1729 # urlify commits - extract commit ids and make link out of them, if we have
1729 # urlify commits - extract commit ids and make link out of them, if we have
1730 # the scope of repository present.
1730 # the scope of repository present.
1731 if repository:
1731 if repository:
1732 newtext = urlify_commits(newtext, repository)
1732 newtext = urlify_commits(newtext, repository)
1733
1733
1734 # process issue tracker patterns
1734 # process issue tracker patterns
1735 newtext, issues = process_patterns(newtext, repository or '')
1735 newtext, issues = process_patterns(newtext, repository or '')
1736
1736
1737 return literal(newtext)
1737 return literal(newtext)
1738
1738
1739
1739
1740 def render_binary(repo_name, file_obj):
1740 def render_binary(repo_name, file_obj):
1741 """
1741 """
1742 Choose how to render a binary file
1742 Choose how to render a binary file
1743 """
1743 """
1744 filename = file_obj.name
1744 filename = file_obj.name
1745
1745
1746 # images
1746 # images
1747 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1747 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1748 if fnmatch.fnmatch(filename, pat=ext):
1748 if fnmatch.fnmatch(filename, pat=ext):
1749 alt = filename
1749 alt = filename
1750 src = url('files_raw_home', repo_name=repo_name,
1750 src = url('files_raw_home', repo_name=repo_name,
1751 revision=file_obj.commit.raw_id, f_path=file_obj.path)
1751 revision=file_obj.commit.raw_id, f_path=file_obj.path)
1752 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1752 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1753
1753
1754
1754
1755 def renderer_from_filename(filename, exclude=None):
1755 def renderer_from_filename(filename, exclude=None):
1756 """
1756 """
1757 Choose a renderer based on the filename; this works only for text-based files
1757 Choose a renderer based on the filename; this works only for text-based files
1758 """
1758 """
1759
1759
1760 # ipython
1760 # ipython
1761 for ext in ['*.ipynb']:
1761 for ext in ['*.ipynb']:
1762 if fnmatch.fnmatch(filename, pat=ext):
1762 if fnmatch.fnmatch(filename, pat=ext):
1763 return 'jupyter'
1763 return 'jupyter'
1764
1764
1765 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1765 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1766 if is_markup:
1766 if is_markup:
1767 return is_markup
1767 return is_markup
1768 return None
1768 return None
1769
1769
1770
1770
1771 def render(source, renderer='rst', mentions=False, relative_url=None,
1771 def render(source, renderer='rst', mentions=False, relative_url=None,
1772 repo_name=None):
1772 repo_name=None):
1773
1773
1774 def maybe_convert_relative_links(html_source):
1774 def maybe_convert_relative_links(html_source):
1775 if relative_url:
1775 if relative_url:
1776 return relative_links(html_source, relative_url)
1776 return relative_links(html_source, relative_url)
1777 return html_source
1777 return html_source
1778
1778
1779 if renderer == 'rst':
1779 if renderer == 'rst':
1780 if repo_name:
1780 if repo_name:
1781 # process patterns on comments if we pass in repo name
1781 # process patterns on comments if we pass in repo name
1782 source, issues = process_patterns(
1782 source, issues = process_patterns(
1783 source, repo_name, link_format='rst')
1783 source, repo_name, link_format='rst')
1784
1784
1785 return literal(
1785 return literal(
1786 '<div class="rst-block">%s</div>' %
1786 '<div class="rst-block">%s</div>' %
1787 maybe_convert_relative_links(
1787 maybe_convert_relative_links(
1788 MarkupRenderer.rst(source, mentions=mentions)))
1788 MarkupRenderer.rst(source, mentions=mentions)))
1789 elif renderer == 'markdown':
1789 elif renderer == 'markdown':
1790 if repo_name:
1790 if repo_name:
1791 # process patterns on comments if we pass in repo name
1791 # process patterns on comments if we pass in repo name
1792 source, issues = process_patterns(
1792 source, issues = process_patterns(
1793 source, repo_name, link_format='markdown')
1793 source, repo_name, link_format='markdown')
1794
1794
1795 return literal(
1795 return literal(
1796 '<div class="markdown-block">%s</div>' %
1796 '<div class="markdown-block">%s</div>' %
1797 maybe_convert_relative_links(
1797 maybe_convert_relative_links(
1798 MarkupRenderer.markdown(source, flavored=True,
1798 MarkupRenderer.markdown(source, flavored=True,
1799 mentions=mentions)))
1799 mentions=mentions)))
1800 elif renderer == 'jupyter':
1800 elif renderer == 'jupyter':
1801 return literal(
1801 return literal(
1802 '<div class="ipynb">%s</div>' %
1802 '<div class="ipynb">%s</div>' %
1803 maybe_convert_relative_links(
1803 maybe_convert_relative_links(
1804 MarkupRenderer.jupyter(source)))
1804 MarkupRenderer.jupyter(source)))
1805
1805
1806 # None means just show the file-source
1806 # None means just show the file-source
1807 return None
1807 return None
1808
1808
1809
1809
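Editor's note: a compact sketch of the dispatch in render() above, with a stub in place of MarkupRenderer (the stub and its output are obviously not the real renderers):

def render_sketch(source, renderer='rst'):
    stub = lambda s: '<p>%s</p>' % s          # stands in for MarkupRenderer
    wrappers = {
        'rst': '<div class="rst-block">%s</div>',
        'markdown': '<div class="markdown-block">%s</div>',
        'jupyter': '<div class="ipynb">%s</div>',
    }
    if renderer in wrappers:
        return wrappers[renderer] % stub(source)
    return None   # None means: just show the raw file source

print(render_sketch('hello', 'markdown'))   # <div class="markdown-block"><p>hello</p></div>
print(render_sketch('hello', None))         # None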
1810 def commit_status(repo, commit_id):
1810 def commit_status(repo, commit_id):
1811 return ChangesetStatusModel().get_status(repo, commit_id)
1811 return ChangesetStatusModel().get_status(repo, commit_id)
1812
1812
1813
1813
1814 def commit_status_lbl(commit_status):
1814 def commit_status_lbl(commit_status):
1815 return dict(ChangesetStatus.STATUSES).get(commit_status)
1815 return dict(ChangesetStatus.STATUSES).get(commit_status)
1816
1816
1817
1817
1818 def commit_time(repo_name, commit_id):
1818 def commit_time(repo_name, commit_id):
1819 repo = Repository.get_by_repo_name(repo_name)
1819 repo = Repository.get_by_repo_name(repo_name)
1820 commit = repo.get_commit(commit_id=commit_id)
1820 commit = repo.get_commit(commit_id=commit_id)
1821 return commit.date
1821 return commit.date
1822
1822
1823
1823
1824 def get_permission_name(key):
1824 def get_permission_name(key):
1825 return dict(Permission.PERMS).get(key)
1825 return dict(Permission.PERMS).get(key)
1826
1826
1827
1827
1828 def journal_filter_help():
1828 def journal_filter_help():
1829 return _(
1829 return _(
1830 'Example filter terms:\n' +
1830 'Example filter terms:\n' +
1831 ' repository:vcs\n' +
1831 ' repository:vcs\n' +
1832 ' username:marcin\n' +
1832 ' username:marcin\n' +
1833 ' action:*push*\n' +
1833 ' action:*push*\n' +
1834 ' ip:127.0.0.1\n' +
1834 ' ip:127.0.0.1\n' +
1835 ' date:20120101\n' +
1835 ' date:20120101\n' +
1836 ' date:[20120101100000 TO 20120102]\n' +
1836 ' date:[20120101100000 TO 20120102]\n' +
1837 '\n' +
1837 '\n' +
1838 'Generate wildcards using \'*\' character:\n' +
1838 'Generate wildcards using \'*\' character:\n' +
1839 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1839 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1840 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1840 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1841 '\n' +
1841 '\n' +
1842 'Optional AND / OR operators in queries\n' +
1842 'Optional AND / OR operators in queries\n' +
1843 ' "repository:vcs OR repository:test"\n' +
1843 ' "repository:vcs OR repository:test"\n' +
1844 ' "username:test AND repository:test*"\n'
1844 ' "username:test AND repository:test*"\n'
1845 )
1845 )
1846
1846
1847
1847
1848 def search_filter_help(searcher):
1848 def search_filter_help(searcher):
1849
1849
1850 terms = ''
1850 terms = ''
1851 return _(
1851 return _(
1852 'Example filter terms for `{searcher}` search:\n' +
1852 'Example filter terms for `{searcher}` search:\n' +
1853 '{terms}\n' +
1853 '{terms}\n' +
1854 'Generate wildcards using \'*\' character:\n' +
1854 'Generate wildcards using \'*\' character:\n' +
1855 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1855 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1856 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1856 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1857 '\n' +
1857 '\n' +
1858 'Optional AND / OR operators in queries\n' +
1858 'Optional AND / OR operators in queries\n' +
1859 ' "repo_name:vcs OR repo_name:test"\n' +
1859 ' "repo_name:vcs OR repo_name:test"\n' +
1860 ' "owner:test AND repo_name:test*"\n' +
1860 ' "owner:test AND repo_name:test*"\n' +
1861 'More: {search_doc}'
1861 'More: {search_doc}'
1862 ).format(searcher=searcher.name,
1862 ).format(searcher=searcher.name,
1863 terms=terms, search_doc=searcher.query_lang_doc)
1863 terms=terms, search_doc=searcher.query_lang_doc)
1864
1864
1865
1865
1866 def not_mapped_error(repo_name):
1866 def not_mapped_error(repo_name):
1867 flash(_('%s repository is not mapped to db; perhaps'
1867 flash(_('%s repository is not mapped to db; perhaps'
1868 ' it was created or renamed from the filesystem.'
1868 ' it was created or renamed from the filesystem.'
1869 ' Please run the application again'
1869 ' Please run the application again'
1870 ' in order to rescan repositories') % repo_name, category='error')
1870 ' in order to rescan repositories') % repo_name, category='error')
1871
1871
1872
1872
1873 def ip_range(ip_addr):
1873 def ip_range(ip_addr):
1874 from rhodecode.model.db import UserIpMap
1874 from rhodecode.model.db import UserIpMap
1875 s, e = UserIpMap._get_ip_range(ip_addr)
1875 s, e = UserIpMap._get_ip_range(ip_addr)
1876 return '%s - %s' % (s, e)
1876 return '%s - %s' % (s, e)
1877
1877
1878
1878
1879 def form(url, method='post', needs_csrf_token=True, **attrs):
1879 def form(url, method='post', needs_csrf_token=True, **attrs):
1880 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1880 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1881 if method.lower() != 'get' and needs_csrf_token:
1881 if method.lower() != 'get' and needs_csrf_token:
1882 raise Exception(
1882 raise Exception(
1883 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1883 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1884 'CSRF token. If the endpoint does not require such token you can ' +
1884 'CSRF token. If the endpoint does not require such token you can ' +
1885 'explicitly set the parameter needs_csrf_token to false.')
1885 'explicitly set the parameter needs_csrf_token to false.')
1886
1886
1887 return wh_form(url, method=method, **attrs)
1887 return wh_form(url, method=method, **attrs)
1888
1888
1889
1889
1890 def secure_form(url, method="POST", multipart=False, **attrs):
1890 def secure_form(url, method="POST", multipart=False, **attrs):
1891 """Start a form tag that points the action to an url. This
1891 """Start a form tag that points the action to an url. This
1892 form tag will also include the hidden field containing
1892 form tag will also include the hidden field containing
1893 the auth token.
1893 the auth token.
1894
1894
1895 The url options should be given either as a string, or as a
1895 The url options should be given either as a string, or as a
1896 ``url()`` function. The method for the form defaults to POST.
1896 ``url()`` function. The method for the form defaults to POST.
1897
1897
1898 Options:
1898 Options:
1899
1899
1900 ``multipart``
1900 ``multipart``
1901 If set to True, the enctype is set to "multipart/form-data".
1901 If set to True, the enctype is set to "multipart/form-data".
1902 ``method``
1902 ``method``
1903 The method to use when submitting the form, usually either
1903 The method to use when submitting the form, usually either
1904 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1904 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1905 hidden input with name _method is added to simulate the verb
1905 hidden input with name _method is added to simulate the verb
1906 over POST.
1906 over POST.
1907
1907
1908 """
1908 """
1909 from webhelpers.pylonslib.secure_form import insecure_form
1909 from webhelpers.pylonslib.secure_form import insecure_form
1910 form = insecure_form(url, method, multipart, **attrs)
1910 form = insecure_form(url, method, multipart, **attrs)
1911 token = csrf_input()
1911 token = csrf_input()
1912 return literal("%s\n%s" % (form, token))
1912 return literal("%s\n%s" % (form, token))
1913
1913
1914 def csrf_input():
1914 def csrf_input():
1915 return literal(
1915 return literal(
1916 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1916 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1917 csrf_token_key, csrf_token_key, get_csrf_token()))
1917 csrf_token_key, csrf_token_key, get_csrf_token()))
1918
1918
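Editor's note: for orientation, this is roughly the markup secure_form() plus csrf_input() end up emitting; the token value, field name and action URL below are placeholders, not what a live instance produces:

csrf_token_key = 'csrf_token'   # assumed key name
token_value = 'SECRET'          # placeholder; really get_csrf_token()
csrf_field = '<input type="hidden" id="{0}" name="{0}" value="{1}">'.format(
    csrf_token_key, token_value)
form_open = '<form action="/some-repo/settings" method="POST">'   # from insecure_form
print('%s\n%s' % (form_open, csrf_field))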
1919 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1919 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1920 select_html = select(name, selected, options, **attrs)
1920 select_html = select(name, selected, options, **attrs)
1921 select2 = """
1921 select2 = """
1922 <script>
1922 <script>
1923 $(document).ready(function() {
1923 $(document).ready(function() {
1924 $('#%s').select2({
1924 $('#%s').select2({
1925 containerCssClass: 'drop-menu',
1925 containerCssClass: 'drop-menu',
1926 dropdownCssClass: 'drop-menu-dropdown',
1926 dropdownCssClass: 'drop-menu-dropdown',
1927 dropdownAutoWidth: true%s
1927 dropdownAutoWidth: true%s
1928 });
1928 });
1929 });
1929 });
1930 </script>
1930 </script>
1931 """
1931 """
1932 filter_option = """,
1932 filter_option = """,
1933 minimumResultsForSearch: -1
1933 minimumResultsForSearch: -1
1934 """
1934 """
1935 input_id = attrs.get('id') or name
1935 input_id = attrs.get('id') or name
1936 filter_enabled = "" if enable_filter else filter_option
1936 filter_enabled = "" if enable_filter else filter_option
1937 select_script = literal(select2 % (input_id, filter_enabled))
1937 select_script = literal(select2 % (input_id, filter_enabled))
1938
1938
1939 return literal(select_html+select_script)
1939 return literal(select_html+select_script)
1940
1940
1941
1941
1942 def get_visual_attr(tmpl_context_var, attr_name):
1942 def get_visual_attr(tmpl_context_var, attr_name):
1943 """
1943 """
1944 A safe way to get an attribute from the `visual` variable of the template context
1944 A safe way to get an attribute from the `visual` variable of the template context
1945
1945
1946 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1946 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1947 :param attr_name: name of the attribute we fetch from the c.visual
1947 :param attr_name: name of the attribute we fetch from the c.visual
1948 """
1948 """
1949 visual = getattr(tmpl_context_var, 'visual', None)
1949 visual = getattr(tmpl_context_var, 'visual', None)
1950 if not visual:
1950 if not visual:
1951 return
1951 return
1952 else:
1952 else:
1953 return getattr(visual, attr_name, None)
1953 return getattr(visual, attr_name, None)
1954
1954
1955
1955
1956 def get_last_path_part(file_node):
1956 def get_last_path_part(file_node):
1957 if not file_node.path:
1957 if not file_node.path:
1958 return u''
1958 return u''
1959
1959
1960 path = safe_unicode(file_node.path.split('/')[-1])
1960 path = safe_unicode(file_node.path.split('/')[-1])
1961 return u'../' + path
1961 return u'../' + path
1962
1962
1963
1963
1964 def route_url(*args, **kwargs):
1964 def route_url(*args, **kwargs):
1965 """
1965 """
1966 Wrapper around pyramid's `route_url` (fully qualified URL) function.
1966 Wrapper around pyramid's `route_url` (fully qualified URL) function.
1967 It is used to generate URLs from within pylons views or templates.
1967 It is used to generate URLs from within pylons views or templates.
1968 This will be removed when the pyramid migration is finished.
1968 This will be removed when the pyramid migration is finished.
1969 """
1969 """
1970 req = get_current_request()
1970 req = get_current_request()
1971 return req.route_url(*args, **kwargs)
1971 return req.route_url(*args, **kwargs)
1972
1972
1973
1973
1974 def route_path(*args, **kwargs):
1974 def route_path(*args, **kwargs):
1975 """
1975 """
1976 Wrapper around pyramid's `route_path` function. It is used to generate
1976 Wrapper around pyramid's `route_path` function. It is used to generate
1977 URLs from within pylons views or templates. This will be removed when
1977 URLs from within pylons views or templates. This will be removed when
1978 the pyramid migration is finished.
1978 the pyramid migration is finished.
1979 """
1979 """
1980 req = get_current_request()
1980 req = get_current_request()
1981 return req.route_path(*args, **kwargs)
1981 return req.route_path(*args, **kwargs)
1982
1982
1983
1983
1984 def route_path_or_none(*args, **kwargs):
1984 def route_path_or_none(*args, **kwargs):
1985 try:
1985 try:
1986 return route_path(*args, **kwargs)
1986 return route_path(*args, **kwargs)
1987 except KeyError:
1987 except KeyError:
1988 return None
1988 return None
1989
1989
1990
1990
1991 def static_url(*args, **kwds):
1991 def static_url(*args, **kwds):
1992 """
1992 """
1993 Wrapper around pyramid's `static_url` function. It is used to generate
1993 Wrapper around pyramid's `static_url` function. It is used to generate
1994 URLs from within pylons views or templates. This will be removed when
1994 URLs from within pylons views or templates. This will be removed when
1995 the pyramid migration is finished.
1995 the pyramid migration is finished.
1996 """
1996 """
1997 req = get_current_request()
1997 req = get_current_request()
1998 return req.static_url(*args, **kwds)
1998 return req.static_url(*args, **kwds)
1999
1999
2000
2000
2001 def resource_path(*args, **kwds):
2001 def resource_path(*args, **kwds):
2002 """
2002 """
2003 Wrapper around pyramid's `resource_path` function. It is used to generate
2003 Wrapper around pyramid's `resource_path` function. It is used to generate
2004 URLs from within pylons views or templates. This will be removed when
2004 URLs from within pylons views or templates. This will be removed when
2005 the pyramid migration is finished.
2005 the pyramid migration is finished.
2006 """
2006 """
2007 req = get_current_request()
2007 req = get_current_request()
2008 return req.resource_path(*args, **kwds)
2008 return req.resource_path(*args, **kwds)
2009
2009
2010
2010
2011 def api_call_example(method, args):
2011 def api_call_example(method, args):
2012 """
2012 """
2013 Generates an API call example via CURL
2013 Generates an API call example via CURL
2014 """
2014 """
2015 args_json = json.dumps(OrderedDict([
2015 args_json = json.dumps(OrderedDict([
2016 ('id', 1),
2016 ('id', 1),
2017 ('auth_token', 'SECRET'),
2017 ('auth_token', 'SECRET'),
2018 ('method', method),
2018 ('method', method),
2019 ('args', args)
2019 ('args', args)
2020 ]))
2020 ]))
2021 return literal(
2021 return literal(
2022 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2022 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2023 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2023 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2024 "and needs to be of `api calls` role."
2024 "and needs to be of `api calls` role."
2025 .format(
2025 .format(
2026 api_url=route_url('apiv2'),
2026 api_url=route_url('apiv2'),
2027 token_url=route_url('my_account_auth_tokens'),
2027 token_url=route_url('my_account_auth_tokens'),
2028 data=args_json))
2028 data=args_json))
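Editor's note: an example of the curl command this helper renders, with illustrative method/args and an assumed API endpoint (the real values come from route_url('apiv2') and the caller):

import json
from collections import OrderedDict

method, args = 'get_repo', {'repoid': 'some-repo'}     # illustrative call
args_json = json.dumps(OrderedDict([
    ('id', 1), ('auth_token', 'SECRET'), ('method', method), ('args', args)]))

api_url = 'https://rhodecode.example.com/_admin/api'   # assumed endpoint
print("curl {0} -X POST -H 'content-type:text/plain' --data-binary '{1}'".format(
    api_url, args_json))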
@@ -1,635 +1,632 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 comments model for RhodeCode
22 comments model for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import collections
27 import collections
28
28
29 from datetime import datetime
29 from datetime import datetime
30
30
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from pyramid.threadlocal import get_current_registry
32 from pyramid.threadlocal import get_current_registry
33 from sqlalchemy.sql.expression import null
33 from sqlalchemy.sql.expression import null
34 from sqlalchemy.sql.functions import coalesce
34 from sqlalchemy.sql.functions import coalesce
35
35
36 from rhodecode.lib import helpers as h, diffs
36 from rhodecode.lib import helpers as h, diffs
37 from rhodecode.lib.channelstream import channelstream_request
37 from rhodecode.lib.channelstream import channelstream_request
38 from rhodecode.lib.utils import action_logger
38 from rhodecode.lib.utils import action_logger
39 from rhodecode.lib.utils2 import extract_mentioned_users
39 from rhodecode.lib.utils2 import extract_mentioned_users
40 from rhodecode.model import BaseModel
40 from rhodecode.model import BaseModel
41 from rhodecode.model.db import (
41 from rhodecode.model.db import (
42 ChangesetComment, User, Notification, PullRequest, AttributeDict)
42 ChangesetComment, User, Notification, PullRequest, AttributeDict)
43 from rhodecode.model.notification import NotificationModel
43 from rhodecode.model.notification import NotificationModel
44 from rhodecode.model.meta import Session
44 from rhodecode.model.meta import Session
45 from rhodecode.model.settings import VcsSettingsModel
45 from rhodecode.model.settings import VcsSettingsModel
46 from rhodecode.model.notification import EmailNotificationModel
46 from rhodecode.model.notification import EmailNotificationModel
47 from rhodecode.model.validation_schema.schemas import comment_schema
47 from rhodecode.model.validation_schema.schemas import comment_schema
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class CommentsModel(BaseModel):
53 class CommentsModel(BaseModel):
54
54
55 cls = ChangesetComment
55 cls = ChangesetComment
56
56
57 DIFF_CONTEXT_BEFORE = 3
57 DIFF_CONTEXT_BEFORE = 3
58 DIFF_CONTEXT_AFTER = 3
58 DIFF_CONTEXT_AFTER = 3
59
59
60 def __get_commit_comment(self, changeset_comment):
60 def __get_commit_comment(self, changeset_comment):
61 return self._get_instance(ChangesetComment, changeset_comment)
61 return self._get_instance(ChangesetComment, changeset_comment)
62
62
63 def __get_pull_request(self, pull_request):
63 def __get_pull_request(self, pull_request):
64 return self._get_instance(PullRequest, pull_request)
64 return self._get_instance(PullRequest, pull_request)
65
65
66 def _extract_mentions(self, s):
66 def _extract_mentions(self, s):
67 user_objects = []
67 user_objects = []
68 for username in extract_mentioned_users(s):
68 for username in extract_mentioned_users(s):
69 user_obj = User.get_by_username(username, case_insensitive=True)
69 user_obj = User.get_by_username(username, case_insensitive=True)
70 if user_obj:
70 if user_obj:
71 user_objects.append(user_obj)
71 user_objects.append(user_obj)
72 return user_objects
72 return user_objects
73
73
74 def _get_renderer(self, global_renderer='rst'):
74 def _get_renderer(self, global_renderer='rst'):
75 try:
75 try:
76 # try reading from visual context
76 # try reading from visual context
77 from pylons import tmpl_context
77 from pylons import tmpl_context
78 global_renderer = tmpl_context.visual.default_renderer
78 global_renderer = tmpl_context.visual.default_renderer
79 except AttributeError:
79 except AttributeError:
80 log.debug("Renderer not set, falling back "
80 log.debug("Renderer not set, falling back "
81 "to default renderer '%s'", global_renderer)
81 "to default renderer '%s'", global_renderer)
82 except Exception:
82 except Exception:
83 log.error(traceback.format_exc())
83 log.error(traceback.format_exc())
84 return global_renderer
84 return global_renderer
85
85
86 def aggregate_comments(self, comments, versions, show_version, inline=False):
86 def aggregate_comments(self, comments, versions, show_version, inline=False):
87 # group comments by version; build the 'at', cumulative 'until', and 'display' buckets
87 # group comments by version; build the 'at', cumulative 'until', and 'display' buckets
88
88
89 comment_groups = collections.defaultdict(list)
89 comment_groups = collections.defaultdict(list)
90 [comment_groups[
90 [comment_groups[
91 _co.pull_request_version_id].append(_co) for _co in comments]
91 _co.pull_request_version_id].append(_co) for _co in comments]
92
92
93 def yield_comments(pos):
93 def yield_comments(pos):
94 for co in comment_groups[pos]:
94 for co in comment_groups[pos]:
95 yield co
95 yield co
96
96
97 comment_versions = collections.defaultdict(
97 comment_versions = collections.defaultdict(
98 lambda: collections.defaultdict(list))
98 lambda: collections.defaultdict(list))
99 prev_prvid = -1
99 prev_prvid = -1
100 # fake last entry with None, to aggregate on "latest" version which
100 # fake last entry with None, to aggregate on "latest" version which
101 # doesn't have a pull_request_version_id
101 # doesn't have a pull_request_version_id
102 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
102 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
103 prvid = ver.pull_request_version_id
103 prvid = ver.pull_request_version_id
104 if prev_prvid == -1:
104 if prev_prvid == -1:
105 prev_prvid = prvid
105 prev_prvid = prvid
106
106
107 for co in yield_comments(prvid):
107 for co in yield_comments(prvid):
108 comment_versions[prvid]['at'].append(co)
108 comment_versions[prvid]['at'].append(co)
109
109
110 # save until
110 # save until
111 current = comment_versions[prvid]['at']
111 current = comment_versions[prvid]['at']
112 prev_until = comment_versions[prev_prvid]['until']
112 prev_until = comment_versions[prev_prvid]['until']
113 cur_until = prev_until + current
113 cur_until = prev_until + current
114 comment_versions[prvid]['until'].extend(cur_until)
114 comment_versions[prvid]['until'].extend(cur_until)
115
115
116 # save outdated
116 # save outdated
117 if inline:
117 if inline:
118 outdated = [x for x in cur_until
118 outdated = [x for x in cur_until
119 if x.outdated_at_version(show_version)]
119 if x.outdated_at_version(show_version)]
120 else:
120 else:
121 outdated = [x for x in cur_until
121 outdated = [x for x in cur_until
122 if x.older_than_version(show_version)]
122 if x.older_than_version(show_version)]
123 display = [x for x in cur_until if x not in outdated]
123 display = [x for x in cur_until if x not in outdated]
124
124
125 comment_versions[prvid]['outdated'] = outdated
125 comment_versions[prvid]['outdated'] = outdated
126 comment_versions[prvid]['display'] = display
126 comment_versions[prvid]['display'] = display
127
127
128 prev_prvid = prvid
128 prev_prvid = prvid
129
129
130 return comment_versions
130 return comment_versions
131
131
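Editor's note: the 'at' versus cumulative 'until' buckets built by aggregate_comments() can be illustrated with toy data; this sketch ignores the outdated/display split and uses plain lists instead of comment objects:

import collections

# toy data: which pull-request version each comment was made against
comments_by_version = {
    1: ['c1', 'c2'],
    2: ['c3'],
    None: ['c4'],      # None = made against the latest, unversioned state
}
versions = [1, 2, None]   # mirrors versions + the faked "latest" entry

buckets = collections.defaultdict(dict)
until = []
for ver in versions:
    at = comments_by_version.get(ver, [])
    until = until + at                     # cumulative, like the 'until' bucket
    buckets[ver] = {'at': at, 'until': list(until)}

print(buckets[2])
# {'at': ['c3'], 'until': ['c1', 'c2', 'c3']} -> everything visible up to version 2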
    def get_unresolved_todos(self, pull_request, show_outdated=True):

        todos = Session().query(ChangesetComment) \
            .filter(ChangesetComment.pull_request == pull_request) \
            .filter(ChangesetComment.resolved_by == None) \
            .filter(ChangesetComment.comment_type
                    == ChangesetComment.COMMENT_TYPE_TODO)

        if not show_outdated:
            todos = todos.filter(
                coalesce(ChangesetComment.display_state, '') !=
                ChangesetComment.COMMENT_OUTDATED)

        todos = todos.all()

        return todos

    def get_commit_unresolved_todos(self, commit_id, show_outdated=True):

        todos = Session().query(ChangesetComment) \
            .filter(ChangesetComment.revision == commit_id) \
            .filter(ChangesetComment.resolved_by == None) \
            .filter(ChangesetComment.comment_type
                    == ChangesetComment.COMMENT_TYPE_TODO)

        if not show_outdated:
            todos = todos.filter(
                coalesce(ChangesetComment.display_state, '') !=
                ChangesetComment.COMMENT_OUTDATED)

        todos = todos.all()

        return todos

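Both TODO helpers issue the same query shape: unresolved comments (resolved_by is NULL) of type 'todo', optionally filtering out outdated ones. A hedged usage sketch, assuming a PullRequest object is already loaded:

# Sketch only -- `pull_request` is assumed to be an already-loaded PullRequest.
todos = CommentsModel().get_unresolved_todos(pull_request, show_outdated=False)
if todos:
    log.debug('%s unresolved TODO comment(s) on this pull request', len(todos))
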
    def create(self, text, repo, user, commit_id=None, pull_request=None,
               f_path=None, line_no=None, status_change=None,
               status_change_type=None, comment_type=None,
               resolves_comment_id=None, closing_pr=False, send_email=True,
               renderer=None):
        """
        Creates a new comment for a commit or a pull request.
        If status_change is not None, this comment is associated with a
        status change of the commit, or of the commit associated with the
        pull request.

        :param text:
        :param repo:
        :param user:
        :param commit_id:
        :param pull_request:
        :param f_path:
        :param line_no:
        :param status_change: Label for status change
        :param comment_type: Type of comment
        :param status_change_type: type of status change
        :param closing_pr:
        :param send_email:
        :param renderer: pick renderer for this comment
        """
        if not text:
            log.warning('Missing text for comment, skipping...')
            return

        if not renderer:
            renderer = self._get_renderer()

        repo = self._get_repo(repo)
        user = self._get_user(user)

        schema = comment_schema.CommentSchema()
        validated_kwargs = schema.deserialize(dict(
            comment_body=text,
            comment_type=comment_type,
            comment_file=f_path,
            comment_line=line_no,
            renderer_type=renderer,
            status_change=status_change_type,
            resolves_comment_id=resolves_comment_id,
            repo=repo.repo_id,
            user=user.user_id,
        ))

        comment = ChangesetComment()
        comment.renderer = validated_kwargs['renderer_type']
        comment.text = validated_kwargs['comment_body']
        comment.f_path = validated_kwargs['comment_file']
        comment.line_no = validated_kwargs['comment_line']
        comment.comment_type = validated_kwargs['comment_type']

        comment.repo = repo
        comment.author = user
        comment.resolved_comment = self.__get_commit_comment(
            validated_kwargs['resolves_comment_id'])

        pull_request_id = pull_request

        commit_obj = None
        pull_request_obj = None

        if commit_id:
            notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
            # do a lookup, so we don't pass something bad here
            commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
            comment.revision = commit_obj.raw_id

        elif pull_request_id:
            notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
            pull_request_obj = self.__get_pull_request(pull_request_id)
            comment.pull_request = pull_request_obj
        else:
            raise Exception('Please specify commit or pull_request_id')

        Session().add(comment)
        Session().flush()
        kwargs = {
            'user': user,
            'renderer_type': renderer,
            'repo_name': repo.repo_name,
            'status_change': status_change,
            'status_change_type': status_change_type,
            'comment_body': text,
            'comment_file': f_path,
            'comment_line': line_no,
            'comment_type': comment_type or 'note'
        }

        if commit_obj:
            recipients = ChangesetComment.get_users(
                revision=commit_obj.raw_id)
            # add commit author if it's in RhodeCode system
            cs_author = User.get_from_cs_author(commit_obj.author)
            if not cs_author:
                # use repo owner if we cannot extract the author correctly
                cs_author = repo.user
            recipients += [cs_author]

            commit_comment_url = self.get_url(comment)

            # previously: h.url('summary_home', repo_name=repo.repo_name, qualified=True)
            target_repo_url = h.link_to(
                repo.repo_name,
                h.route_url('repo_summary', repo_name=repo.repo_name))

            # commit specifics
            kwargs.update({
                'commit': commit_obj,
                'commit_message': commit_obj.message,
                'commit_target_repo': target_repo_url,
                'commit_comment_url': commit_comment_url,
            })

        elif pull_request_obj:
            # get the current participants of this pull request
            recipients = ChangesetComment.get_users(
                pull_request_id=pull_request_obj.pull_request_id)
            # add pull request author
            recipients += [pull_request_obj.author]

            # add the reviewers to notification
            recipients += [x.user for x in pull_request_obj.reviewers]

            pr_target_repo = pull_request_obj.target_repo
            pr_source_repo = pull_request_obj.source_repo

            pr_comment_url = h.url(
                'pullrequest_show',
                repo_name=pr_target_repo.repo_name,
                pull_request_id=pull_request_obj.pull_request_id,
                anchor='comment-%s' % comment.comment_id,
                qualified=True,)

            # set some variables for email notification
            # previously built with h.url('summary_home', ..., qualified=True)
            pr_target_repo_url = h.route_url(
                'repo_summary', repo_name=pr_target_repo.repo_name)

            pr_source_repo_url = h.route_url(
                'repo_summary', repo_name=pr_source_repo.repo_name)

            # pull request specifics
            kwargs.update({
                'pull_request': pull_request_obj,
                'pr_id': pull_request_obj.pull_request_id,
                'pr_target_repo': pr_target_repo,
                'pr_target_repo_url': pr_target_repo_url,
                'pr_source_repo': pr_source_repo,
                'pr_source_repo_url': pr_source_repo_url,
                'pr_comment_url': pr_comment_url,
                'pr_closing': closing_pr,
            })
        if send_email:
            # pre-generate the subject for notification itself
            (subject,
             _h, _e,  # we don't care about those
             body_plaintext) = EmailNotificationModel().render_email(
                notification_type, **kwargs)

            mention_recipients = set(
                self._extract_mentions(text)).difference(recipients)

            # create notification objects, and emails
            NotificationModel().create(
                created_by=user,
                notification_subject=subject,
                notification_body=body_plaintext,
                notification_type=notification_type,
                recipients=recipients,
                mention_recipients=mention_recipients,
                email_kwargs=kwargs,
            )

        action = (
            'user_commented_pull_request:{}'.format(
                comment.pull_request.pull_request_id)
            if comment.pull_request
            else 'user_commented_revision:{}'.format(comment.revision)
        )
        action_logger(user, action, comment.repo)

        registry = get_current_registry()
        rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
        channelstream_config = rhodecode_plugins.get('channelstream', {})
        msg_url = ''
        if commit_obj:
            msg_url = commit_comment_url
            repo_name = repo.repo_name
        elif pull_request_obj:
            msg_url = pr_comment_url
            repo_name = pr_target_repo.repo_name

        if channelstream_config.get('enabled'):
            message = '<strong>{}</strong> {} - ' \
                      '<a onclick="window.location=\'{}\';' \
                      'window.location.reload()">' \
                      '<strong>{}</strong></a>'
            message = message.format(
                user.username, _('made a comment'), msg_url,
                _('Show it now'))
            channel = '/repo${}$/pr/{}'.format(
                repo_name,
                pull_request_id
            )
            payload = {
                'type': 'message',
                'timestamp': datetime.utcnow(),
                'user': 'system',
                'exclude_users': [user.username],
                'channel': channel,
                'message': {
                    'message': message,
                    'level': 'info',
                    'topic': '/notifications'
                }
            }
            channelstream_request(channelstream_config, [payload],
                                  '/message', raise_exc=False)

        return comment

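For orientation, a minimal sketch of driving create() for a commit comment; the repository name, username and commit hash below are placeholders, and it assumes _get_repo/_get_user resolve plain names the way the other model helpers in this codebase usually do. The method only flushes, so the caller commits the session:

# Sketch only -- 'some-repo', 'admin' and the commit id are placeholders.
comment = CommentsModel().create(
    text='Looks good, but please add a test for the new route.',
    repo='some-repo',       # resolved via self._get_repo()
    user='admin',           # resolved via self._get_user()
    commit_id='abcdef0123456789abcdef0123456789abcdef01',
    comment_type='note',
    send_email=False)
Session().commit()
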
    def delete(self, comment):
        """
        Deletes the given comment.

        :param comment: comment instance or comment_id
        """
        comment = self.__get_commit_comment(comment)
        Session().delete(comment)

        return comment

    def get_all_comments(self, repo_id, revision=None, pull_request=None):
        q = ChangesetComment.query()\
            .filter(ChangesetComment.repo_id == repo_id)
        if revision:
            q = q.filter(ChangesetComment.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            q = q.filter(ChangesetComment.pull_request == pull_request)
        else:
            raise Exception('Please specify commit or pull_request')
        q = q.order_by(ChangesetComment.created_on)
        return q.all()

    def get_url(self, comment):
        comment = self.__get_commit_comment(comment)
        if comment.pull_request:
            return h.url(
                'pullrequest_show',
                repo_name=comment.pull_request.target_repo.repo_name,
                pull_request_id=comment.pull_request.pull_request_id,
                anchor='comment-%s' % comment.comment_id,
                qualified=True,)
        else:
            return h.url(
                'changeset_home',
                repo_name=comment.repo.repo_name,
                revision=comment.revision,
                anchor='comment-%s' % comment.comment_id,
                qualified=True,)

    def get_comments(self, repo_id, revision=None, pull_request=None):
        """
        Gets general (not inline) comments for a revision or pull request.

        :param repo_id:
        :param revision:
        :param pull_request:
        """

        q = ChangesetComment.query()\
            .filter(ChangesetComment.repo_id == repo_id)\
            .filter(ChangesetComment.line_no == None)\
            .filter(ChangesetComment.f_path == None)
        if revision:
            q = q.filter(ChangesetComment.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            q = q.filter(ChangesetComment.pull_request == pull_request)
        else:
            raise Exception('Please specify commit or pull_request')
        q = q.order_by(ChangesetComment.created_on)
        return q.all()

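General comments are the ones carrying no file path or line number; inline comments are fetched through the query and grouping helpers that follow. A hedged sketch of using both, assuming repo_id and pull_request are already at hand:

# Sketch only -- repo_id and pull_request are assumed to exist.
model = CommentsModel()
general = model.get_comments(repo_id, pull_request=pull_request)
inline = model.get_inline_comments(repo_id, pull_request=pull_request)
# `general` is a flat list ordered by creation time;
# `inline` is a {f_path: {line_no: [comments]}} mapping, built further down.
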
    def get_inline_comments(self, repo_id, revision=None, pull_request=None):
        q = self._get_inline_comments_query(repo_id, revision, pull_request)
        return self._group_comments_by_path_and_line_number(q)

    def get_inline_comments_count(self, inline_comments, skip_outdated=True,
                                  version=None):
        inline_cnt = 0
        for fname, per_line_comments in inline_comments.iteritems():
            for lno, comments in per_line_comments.iteritems():
                for comm in comments:
                    if not comm.outdated_at_version(version) and skip_outdated:
                        inline_cnt += 1

        return inline_cnt

    def get_outdated_comments(self, repo_id, pull_request):
        # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
        # of a pull request.
        q = self._all_inline_comments_of_pull_request(pull_request)
        q = q.filter(
            ChangesetComment.display_state ==
            ChangesetComment.COMMENT_OUTDATED
        ).order_by(ChangesetComment.comment_id.asc())

        return self._group_comments_by_path_and_line_number(q)

    def _get_inline_comments_query(self, repo_id, revision, pull_request):
        # TODO: johbo: Split this into two methods: One for PR and one for
        # commit.
        if revision:
            q = Session().query(ChangesetComment).filter(
                ChangesetComment.repo_id == repo_id,
                ChangesetComment.line_no != null(),
                ChangesetComment.f_path != null(),
                ChangesetComment.revision == revision)

        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            if not CommentsModel.use_outdated_comments(pull_request):
                q = self._visible_inline_comments_of_pull_request(pull_request)
            else:
                q = self._all_inline_comments_of_pull_request(pull_request)

        else:
            raise Exception('Please specify commit or pull_request_id')
        q = q.order_by(ChangesetComment.comment_id.asc())
        return q

    def _group_comments_by_path_and_line_number(self, q):
        comments = q.all()
        paths = collections.defaultdict(lambda: collections.defaultdict(list))
        for co in comments:
            paths[co.f_path][co.line_no].append(co)
        return paths

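The grouping helper turns the flat query result into a two-level mapping: file path first, then the comment line key ('o<num>' for old-side and 'n<num>' for new-side lines, see the helpers at the end of the module). A standalone toy run with invented values:

import collections

# Invented stand-in data; the real values are ChangesetComment objects.
flat = [('setup.py', u'n12', 'c1'), ('setup.py', u'n12', 'c2'), ('README.rst', u'o3', 'c3')]
paths = collections.defaultdict(lambda: collections.defaultdict(list))
for f_path, line_no, comment in flat:
    paths[f_path][line_no].append(comment)
print(dict(paths['setup.py']))  # {u'n12': ['c1', 'c2']}
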
    @classmethod
    def needed_extra_diff_context(cls):
        return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)

    def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
        if not CommentsModel.use_outdated_comments(pull_request):
            return

        comments = self._visible_inline_comments_of_pull_request(pull_request)
        comments_to_outdate = comments.all()

        for comment in comments_to_outdate:
            self._outdate_one_comment(comment, old_diff_data, new_diff_data)

    def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
        diff_line = _parse_comment_line_number(comment.line_no)

        try:
            old_context = old_diff_proc.get_context_of_line(
                path=comment.f_path, diff_line=diff_line)
            new_context = new_diff_proc.get_context_of_line(
                path=comment.f_path, diff_line=diff_line)
        except (diffs.LineNotInDiffException,
                diffs.FileNotInDiffException):
            comment.display_state = ChangesetComment.COMMENT_OUTDATED
            return

        if old_context == new_context:
            return

        if self._should_relocate_diff_line(diff_line):
            new_diff_lines = new_diff_proc.find_context(
                path=comment.f_path, context=old_context,
                offset=self.DIFF_CONTEXT_BEFORE)
            if not new_diff_lines:
                comment.display_state = ChangesetComment.COMMENT_OUTDATED
            else:
                new_diff_line = self._choose_closest_diff_line(
                    diff_line, new_diff_lines)
                comment.line_no = _diff_to_comment_line_number(new_diff_line)
        else:
            comment.display_state = ChangesetComment.COMMENT_OUTDATED

    def _should_relocate_diff_line(self, diff_line):
        """
        Checks if relocation shall be tried for the given `diff_line`.

        If a comment points into the first lines, then we can have a situation
        that after an update another line has been added on top. In this case
        we would still find the context and move the comment around. This
        would be wrong.
        """
        should_relocate = (
            (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
            (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
        return should_relocate

    def _choose_closest_diff_line(self, diff_line, new_diff_lines):
        candidate = new_diff_lines[0]
        best_delta = _diff_line_delta(diff_line, candidate)
        for new_diff_line in new_diff_lines[1:]:
            delta = _diff_line_delta(diff_line, new_diff_line)
            if delta < best_delta:
                candidate = new_diff_line
                best_delta = delta
        return candidate

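The relocation machinery keeps an inline comment attached to its line when a pull request is updated: if the old context can still be found in the new diff, the candidate line with the smallest delta wins, otherwise the comment is marked outdated. A standalone toy run of the "closest line" choice, using a namedtuple stand-in for diffs.DiffLineNumber with invented values:

import collections

# Stand-in for diffs.DiffLineNumber (old, new); values are invented.
DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

def delta(a, b):
    # mirrors _diff_line_delta for new-side lines
    return abs(a.new - b.new)

comment_line = DiffLineNumber(old=None, new=40)
candidates = [DiffLineNumber(None, 35), DiffLineNumber(None, 42), DiffLineNumber(None, 90)]
print(min(candidates, key=lambda c: delta(comment_line, c)))  # DiffLineNumber(old=None, new=42)
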
    def _visible_inline_comments_of_pull_request(self, pull_request):
        comments = self._all_inline_comments_of_pull_request(pull_request)
        comments = comments.filter(
            coalesce(ChangesetComment.display_state, '') !=
            ChangesetComment.COMMENT_OUTDATED)
        return comments

    def _all_inline_comments_of_pull_request(self, pull_request):
        comments = Session().query(ChangesetComment)\
            .filter(ChangesetComment.line_no != None)\
            .filter(ChangesetComment.f_path != None)\
            .filter(ChangesetComment.pull_request == pull_request)
        return comments

    def _all_general_comments_of_pull_request(self, pull_request):
        comments = Session().query(ChangesetComment)\
            .filter(ChangesetComment.line_no == None)\
            .filter(ChangesetComment.f_path == None)\
            .filter(ChangesetComment.pull_request == pull_request)
        return comments

    @staticmethod
    def use_outdated_comments(pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_use_outdated_comments', False)


def _parse_comment_line_number(line_no):
    """
    Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
    """
    old_line = None
    new_line = None
    if line_no.startswith('o'):
        old_line = int(line_no[1:])
    elif line_no.startswith('n'):
        new_line = int(line_no[1:])
    else:
        raise ValueError("Comment lines have to start with either 'o' or 'n'.")
    return diffs.DiffLineNumber(old_line, new_line)


def _diff_to_comment_line_number(diff_line):
    if diff_line.new is not None:
        return u'n{}'.format(diff_line.new)
    elif diff_line.old is not None:
        return u'o{}'.format(diff_line.old)
    return u''


def _diff_line_delta(a, b):
    if None not in (a.new, b.new):
        return abs(a.new - b.new)
    elif None not in (a.old, b.old):
        return abs(a.old - b.old)
    else:
        raise ValueError(
            "Cannot compute delta between {} and {}".format(a, b))
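These module-level helpers translate between the stored comment line keys ('o<num>' / 'n<num>') and (old, new) line pairs, and compare two such pairs on whichever side both define. A standalone round-trip sketch, again with a namedtuple standing in for diffs.DiffLineNumber:

import collections

DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

def parse(line_no):                 # mirrors _parse_comment_line_number
    if line_no.startswith('o'):
        return DiffLineNumber(int(line_no[1:]), None)
    if line_no.startswith('n'):
        return DiffLineNumber(None, int(line_no[1:]))
    raise ValueError("Comment lines have to start with either 'o' or 'n'.")

def unparse(diff_line):             # mirrors _diff_to_comment_line_number
    if diff_line.new is not None:
        return u'n{}'.format(diff_line.new)
    if diff_line.old is not None:
        return u'o{}'.format(diff_line.old)
    return u''

assert unparse(parse(u'n42')) == u'n42'
assert parse(u'o7') == DiffLineNumber(old=7, new=None)
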