Show More
@@ -1,85 +1,93 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.config import routing_links |
|
21 | 21 | |
|
22 | 22 | |
|
class VCSCallPredicate(object):
    """Pyramid route predicate that excludes requests flagged as VCS calls."""

    def __init__(self, val, config):
        # ``config`` is required by the custom-predicate protocol but unused.
        self.val = val

    def text(self):
        # Human-readable form; also reused as the predicate hash below.
        return 'vcs_call route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # A request carrying a ``vcs_call`` attribute is a VCS protocol
        # call and must not match regular application routes.
        return not hasattr(request, 'vcs_call')
|
38 | 38 | |
|
39 | 39 | |
|
def includeme(config):
    """Register the home-app routes, redirection links and scan the views."""

    # all simple (name, pattern) routes of this app, in registration order
    _routes = [
        ('home', '/'),
        ('main_page_repos_data', '/_home_repos'),
        ('main_page_repo_groups_data', '/_home_repo_groups'),
        ('user_autocomplete_data', '/_users'),
        ('user_group_autocomplete_data', '/_user_groups'),
        ('repo_list_data', '/_repos'),
        ('repo_group_list_data', '/_repo_groups'),
        ('goto_switcher_data', '/_goto_data'),
        ('markup_preview', '/_markup_preview'),
        ('file_preview', '/_file_preview'),
        ('store_user_session_value', '/_store_session_attr'),
    ]
    for route_name, route_pattern in _routes:
        config.add_route(name=route_name, pattern=route_pattern)

    # register our static links via redirection mechanism
    routing_links.connect_redirection_links(config)

    # Scan module for configuration decorators.
    config.scan('.views', ignore='.tests')

    config.add_route_predicate(
        'skip_vcs_call', VCSCallPredicate)
@@ -1,791 +1,823 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | import logging |
|
23 | 23 | import collections |
|
24 | 24 | |
|
25 | from pyramid.httpexceptions import HTTPNotFound | |
|
25 | 26 | from pyramid.view import view_config |
|
26 | 27 | |
|
27 | from rhodecode.apps._base import BaseAppView | |
|
28 | from rhodecode.apps._base import BaseAppView, DataGridAppView | |
|
28 | 29 | from rhodecode.lib import helpers as h |
|
29 | 30 | from rhodecode.lib.auth import ( |
|
30 |
LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired |
|
|
31 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired, | |
|
32 | HasRepoGroupPermissionAny) | |
|
31 | 33 | from rhodecode.lib.codeblocks import filenode_as_lines_tokens |
|
32 | 34 | from rhodecode.lib.index import searcher_from_config |
|
33 | 35 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
34 | from rhodecode.lib.ext_json import json | |
|
35 | 36 | from rhodecode.lib.vcs.nodes import FileNode |
|
36 | 37 | from rhodecode.model.db import ( |
|
37 |
func, true, or_, case, in_filter_generator, |
|
|
38 | func, true, or_, case, in_filter_generator, Session, | |
|
39 | Repository, RepoGroup, User, UserGroup) | |
|
38 | 40 | from rhodecode.model.repo import RepoModel |
|
39 | 41 | from rhodecode.model.repo_group import RepoGroupModel |
|
40 | from rhodecode.model.scm import RepoGroupList, RepoList | |
|
41 | 42 | from rhodecode.model.user import UserModel |
|
42 | 43 | from rhodecode.model.user_group import UserGroupModel |
|
43 | 44 | |
|
44 | 45 | log = logging.getLogger(__name__) |
|
45 | 46 | |
|
46 | 47 | |
|
47 | class HomeView(BaseAppView): | |
|
48 | class HomeView(BaseAppView, DataGridAppView): | |
|
48 | 49 | |
|
49 | 50 | def load_default_context(self): |
|
50 | 51 | c = self._get_local_tmpl_context() |
|
51 | 52 | c.user = c.auth_user.get_instance() |
|
52 | 53 | |
|
53 | 54 | return c |
|
54 | 55 | |
|
55 | 56 | @LoginRequired() |
|
56 | 57 | @view_config( |
|
57 | 58 | route_name='user_autocomplete_data', request_method='GET', |
|
58 | 59 | renderer='json_ext', xhr=True) |
|
59 | 60 | def user_autocomplete_data(self): |
|
60 | 61 | self.load_default_context() |
|
61 | 62 | query = self.request.GET.get('query') |
|
62 | 63 | active = str2bool(self.request.GET.get('active') or True) |
|
63 | 64 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
64 | 65 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
65 | 66 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
66 | 67 | |
|
67 | 68 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
68 | 69 | query, active, include_groups) |
|
69 | 70 | |
|
70 | 71 | _users = UserModel().get_users( |
|
71 | 72 | name_contains=query, only_active=active) |
|
72 | 73 | |
|
73 | 74 | def maybe_skip_default_user(usr): |
|
74 | 75 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
75 | 76 | return False |
|
76 | 77 | return True |
|
77 | 78 | _users = filter(maybe_skip_default_user, _users) |
|
78 | 79 | |
|
79 | 80 | if include_groups: |
|
80 | 81 | # extend with user groups |
|
81 | 82 | _user_groups = UserGroupModel().get_user_groups( |
|
82 | 83 | name_contains=query, only_active=active, |
|
83 | 84 | expand_groups=expand_groups) |
|
84 | 85 | _users = _users + _user_groups |
|
85 | 86 | |
|
86 | 87 | return {'suggestions': _users} |
|
87 | 88 | |
|
88 | 89 | @LoginRequired() |
|
89 | 90 | @NotAnonymous() |
|
90 | 91 | @view_config( |
|
91 | 92 | route_name='user_group_autocomplete_data', request_method='GET', |
|
92 | 93 | renderer='json_ext', xhr=True) |
|
93 | 94 | def user_group_autocomplete_data(self): |
|
94 | 95 | self.load_default_context() |
|
95 | 96 | query = self.request.GET.get('query') |
|
96 | 97 | active = str2bool(self.request.GET.get('active') or True) |
|
97 | 98 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
98 | 99 | |
|
99 | 100 | log.debug('generating user group list, query:%s, active:%s', |
|
100 | 101 | query, active) |
|
101 | 102 | |
|
102 | 103 | _user_groups = UserGroupModel().get_user_groups( |
|
103 | 104 | name_contains=query, only_active=active, |
|
104 | 105 | expand_groups=expand_groups) |
|
105 | 106 | _user_groups = _user_groups |
|
106 | 107 | |
|
107 | 108 | return {'suggestions': _user_groups} |
|
108 | 109 | |
|
    def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20):
        """
        Return serialized repository entries matching ``name_contains``,
        restricted to repositories the current user may at least read.

        :param name_contains: substring filter on the repo name (ILIKE)
        :param repo_type: optional VCS-type filter
        :param repo_group_name: group prefix used to rank matches from the
            current group context first
        :param limit: maximum number of rows fetched
        """
        # keep the raw query string; it is echoed back as 'value' so the
        # widget can preserve what the user typed
        org_query = name_contains
        # ACL pre-filter: ids of repos the user can read or better.
        # ``or [-1]`` guarantees a non-empty IN clause that matches nothing.
        allowed_ids = self._rhodecode_user.repo_acl_ids(
            ['repository.read', 'repository.write', 'repository.admin'],
            cache=False, name_filter=name_contains) or [-1]

        query = Repository.query()\
            .filter(Repository.archived.isnot(true()))\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids)
            ))

        # rank repos under the current group context ahead of the rest
        query = query.order_by(case(
            [
                (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        # then shortest names first, alphabetical as tie-breaker
        query = query.order_by(func.length(Repository.repo_name))
        query = query.order_by(Repository.repo_name)

        if repo_type:
            query = query.filter(Repository.repo_type == repo_type)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                Repository.repo_name.ilike(ilike_expression))
        query = query.limit(limit)

        # ACLs were already applied via ``allowed_ids``; the query itself
        # is the iterable of permitted rows
        acl_iter = query

        return [
            {
                'id': obj.repo_name,
                'value': org_query,
                'value_display': obj.repo_name,
                'text': obj.repo_name,
                'type': 'repo',
                'repo_id': obj.repo_id,
                'repo_type': obj.repo_type,
                'private': obj.private,
                'url': h.route_path('repo_summary', repo_name=obj.repo_name)
            }
            for obj in acl_iter]
|
154 | 155 | |
|
    def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20):
        """
        Return serialized repository-group entries matching
        ``name_contains``, restricted to groups the current user may read.

        :param name_contains: substring filter on the group name (ILIKE)
        :param repo_group_name: group prefix used to rank matches from the
            current group context first
        :param limit: maximum number of rows fetched
        """
        # raw query string, echoed back as 'value' in each entry
        org_query = name_contains
        # ACL pre-filter; ``or [-1]`` keeps the IN clause non-empty
        allowed_ids = self._rhodecode_user.repo_group_acl_ids(
            ['group.read', 'group.write', 'group.admin'],
            cache=False, name_filter=name_contains) or [-1]

        query = RepoGroup.query()\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(RepoGroup.group_id, allowed_ids)
            ))

        # rank groups under the current context ahead of the rest
        query = query.order_by(case(
            [
                (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        # then shortest names first, alphabetical as tie-breaker
        query = query.order_by(func.length(RepoGroup.group_name))
        query = query.order_by(RepoGroup.group_name)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                RepoGroup.group_name.ilike(ilike_expression))
        query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.group_name,
                'value': org_query,
                'value_display': obj.group_name,
                'text': obj.group_name,
                'type': 'repo_group',
                'repo_group_id': obj.group_id,
                'url': h.route_path(
                    'repo_group_home', repo_group_name=obj.group_name)
            }
            for obj in acl_iter]
|
195 | 196 | |
|
196 | 197 | def _get_user_list(self, name_contains=None, limit=20): |
|
197 | 198 | org_query = name_contains |
|
198 | 199 | if not name_contains: |
|
199 | 200 | return [], False |
|
200 | 201 | |
|
201 | 202 | # TODO(marcink): should all logged in users be allowed to search others? |
|
202 | 203 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
203 | 204 | if not allowed_user_search: |
|
204 | 205 | return [], False |
|
205 | 206 | |
|
206 | 207 | name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains) |
|
207 | 208 | if len(name_contains) != 1: |
|
208 | 209 | return [], False |
|
209 | 210 | |
|
210 | 211 | name_contains = name_contains[0] |
|
211 | 212 | |
|
212 | 213 | query = User.query()\ |
|
213 | 214 | .order_by(func.length(User.username))\ |
|
214 | 215 | .order_by(User.username) \ |
|
215 | 216 | .filter(User.username != User.DEFAULT_USER) |
|
216 | 217 | |
|
217 | 218 | if name_contains: |
|
218 | 219 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
219 | 220 | query = query.filter( |
|
220 | 221 | User.username.ilike(ilike_expression)) |
|
221 | 222 | query = query.limit(limit) |
|
222 | 223 | |
|
223 | 224 | acl_iter = query |
|
224 | 225 | |
|
225 | 226 | return [ |
|
226 | 227 | { |
|
227 | 228 | 'id': obj.user_id, |
|
228 | 229 | 'value': org_query, |
|
229 | 230 | 'value_display': 'user: `{}`'.format(obj.username), |
|
230 | 231 | 'type': 'user', |
|
231 | 232 | 'icon_link': h.gravatar_url(obj.email, 30), |
|
232 | 233 | 'url': h.route_path( |
|
233 | 234 | 'user_profile', username=obj.username) |
|
234 | 235 | } |
|
235 | 236 | for obj in acl_iter], True |
|
236 | 237 | |
|
237 | 238 | def _get_user_groups_list(self, name_contains=None, limit=20): |
|
238 | 239 | org_query = name_contains |
|
239 | 240 | if not name_contains: |
|
240 | 241 | return [], False |
|
241 | 242 | |
|
242 | 243 | # TODO(marcink): should all logged in users be allowed to search others? |
|
243 | 244 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
244 | 245 | if not allowed_user_search: |
|
245 | 246 | return [], False |
|
246 | 247 | |
|
247 | 248 | name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains) |
|
248 | 249 | if len(name_contains) != 1: |
|
249 | 250 | return [], False |
|
250 | 251 | |
|
251 | 252 | name_contains = name_contains[0] |
|
252 | 253 | |
|
253 | 254 | query = UserGroup.query()\ |
|
254 | 255 | .order_by(func.length(UserGroup.users_group_name))\ |
|
255 | 256 | .order_by(UserGroup.users_group_name) |
|
256 | 257 | |
|
257 | 258 | if name_contains: |
|
258 | 259 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
259 | 260 | query = query.filter( |
|
260 | 261 | UserGroup.users_group_name.ilike(ilike_expression)) |
|
261 | 262 | query = query.limit(limit) |
|
262 | 263 | |
|
263 | 264 | acl_iter = query |
|
264 | 265 | |
|
265 | 266 | return [ |
|
266 | 267 | { |
|
267 | 268 | 'id': obj.users_group_id, |
|
268 | 269 | 'value': org_query, |
|
269 | 270 | 'value_display': 'user_group: `{}`'.format(obj.users_group_name), |
|
270 | 271 | 'type': 'user_group', |
|
271 | 272 | 'url': h.route_path( |
|
272 | 273 | 'user_group_profile', user_group_name=obj.users_group_name) |
|
273 | 274 | } |
|
274 | 275 | for obj in acl_iter], True |
|
275 | 276 | |
|
    def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None):
        """
        Resolve a ``commit: <hash-prefix>`` goto-switcher query through the
        full-text search backend.

        Returns ``(entries, prefix_matched)``; ``prefix_matched`` is True
        once the ``commit:`` prefix with a plausible hash was recognised,
        even if no commit ultimately matched.
        """
        # optionally scope the search to the current repo / repo group
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        # too-short queries or a missing search backend yield nothing
        if not query or len(query) < 3 or not searcher:
            return [], False

        # accept an abbreviated sha of 2-40 hex characters after 'commit:'
        commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query)

        if len(commit_hashes) != 1:
            return [], False

        commit_hash = commit_hashes[0]

        # prefix search on commit id; the searcher applies the user's ACLs
        result = searcher.search(
            'commit_id:{}*'.format(commit_hash), 'commit', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        commits = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            commit_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` commit: {}'.format(
                    entry['repository'], entry['commit_id']),
                'type': 'commit',
                'repo': entry['repository'],
                'repo_data': repo_data,

                'url': h.route_path(
                    'repo_commit',
                    repo_name=entry['repository'], commit_id=entry['commit_id'])
            }

            commits.append(commit_entry)
        return commits, True
|
322 | 323 | |
|
    def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None):
        """
        Resolve a ``file: <path-fragment>`` goto-switcher query through the
        full-text search backend.

        Returns ``(entries, prefix_matched)``; ``prefix_matched`` is True
        once the ``file:`` prefix was recognised, even with no matches.
        """
        # optionally scope the search to the current repo / repo group
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        # too-short queries or a missing search backend yield nothing
        if not query or len(query) < 3 or not searcher:
            return [], False

        paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query)
        if len(paths_re) != 1:
            return [], False

        file_path = paths_re[0]

        # escape search-engine special characters in the user's path fragment
        search_path = searcher.escape_specials(file_path)
        # substring search on the raw file path; ACLs applied by the searcher
        result = searcher.search(
            'file.raw:*{}*'.format(search_path), 'path', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        files = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            file_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` file: {}'.format(
                    entry['repository'], entry['file']),
                'type': 'file',
                'repo': entry['repository'],
                'repo_data': repo_data,

                'url': h.route_path(
                    'repo_files',
                    repo_name=entry['repository'], commit_id=entry['commit_id'],
                    f_path=entry['file'])
            }

            files.append(file_entry)
        return files, True
|
370 | 371 | |
|
371 | 372 | @LoginRequired() |
|
372 | 373 | @view_config( |
|
373 | 374 | route_name='repo_list_data', request_method='GET', |
|
374 | 375 | renderer='json_ext', xhr=True) |
|
375 | 376 | def repo_list_data(self): |
|
376 | 377 | _ = self.request.translate |
|
377 | 378 | self.load_default_context() |
|
378 | 379 | |
|
379 | 380 | query = self.request.GET.get('query') |
|
380 | 381 | repo_type = self.request.GET.get('repo_type') |
|
381 | 382 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
382 | 383 | query, repo_type) |
|
383 | 384 | |
|
384 | 385 | res = [] |
|
385 | 386 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
386 | 387 | if repos: |
|
387 | 388 | res.append({ |
|
388 | 389 | 'text': _('Repositories'), |
|
389 | 390 | 'children': repos |
|
390 | 391 | }) |
|
391 | 392 | |
|
392 | 393 | data = { |
|
393 | 394 | 'more': False, |
|
394 | 395 | 'results': res |
|
395 | 396 | } |
|
396 | 397 | return data |
|
397 | 398 | |
|
398 | 399 | @LoginRequired() |
|
399 | 400 | @view_config( |
|
400 | 401 | route_name='repo_group_list_data', request_method='GET', |
|
401 | 402 | renderer='json_ext', xhr=True) |
|
402 | 403 | def repo_group_list_data(self): |
|
403 | 404 | _ = self.request.translate |
|
404 | 405 | self.load_default_context() |
|
405 | 406 | |
|
406 | 407 | query = self.request.GET.get('query') |
|
407 | 408 | |
|
408 | 409 | log.debug('generating repo group list, query:%s', |
|
409 | 410 | query) |
|
410 | 411 | |
|
411 | 412 | res = [] |
|
412 | 413 | repo_groups = self._get_repo_group_list(query) |
|
413 | 414 | if repo_groups: |
|
414 | 415 | res.append({ |
|
415 | 416 | 'text': _('Repository Groups'), |
|
416 | 417 | 'children': repo_groups |
|
417 | 418 | }) |
|
418 | 419 | |
|
419 | 420 | data = { |
|
420 | 421 | 'more': False, |
|
421 | 422 | 'results': res |
|
422 | 423 | } |
|
423 | 424 | return data |
|
424 | 425 | |
|
425 | 426 | def _get_default_search_queries(self, search_context, searcher, query): |
|
426 | 427 | if not searcher: |
|
427 | 428 | return [] |
|
428 | 429 | |
|
429 | 430 | is_es_6 = searcher.is_es_6 |
|
430 | 431 | |
|
431 | 432 | queries = [] |
|
432 | 433 | repo_group_name, repo_name, repo_context = None, None, None |
|
433 | 434 | |
|
434 | 435 | # repo group context |
|
435 | 436 | if search_context.get('search_context[repo_group_name]'): |
|
436 | 437 | repo_group_name = search_context.get('search_context[repo_group_name]') |
|
437 | 438 | if search_context.get('search_context[repo_name]'): |
|
438 | 439 | repo_name = search_context.get('search_context[repo_name]') |
|
439 | 440 | repo_context = search_context.get('search_context[repo_view_type]') |
|
440 | 441 | |
|
441 | 442 | if is_es_6 and repo_name: |
|
442 | 443 | # files |
|
443 | 444 | def query_modifier(): |
|
444 | 445 | qry = query |
|
445 | 446 | return {'q': qry, 'type': 'content'} |
|
446 | 447 | |
|
447 | 448 | label = u'File search for `{}`'.format(h.escape(query)) |
|
448 | 449 | file_qry = { |
|
449 | 450 | 'id': -10, |
|
450 | 451 | 'value': query, |
|
451 | 452 | 'value_display': label, |
|
452 | 453 | 'value_icon': '<i class="icon-code"></i>', |
|
453 | 454 | 'type': 'search', |
|
454 | 455 | 'subtype': 'repo', |
|
455 | 456 | 'url': h.route_path('search_repo', |
|
456 | 457 | repo_name=repo_name, |
|
457 | 458 | _query=query_modifier()) |
|
458 | 459 | } |
|
459 | 460 | |
|
460 | 461 | # commits |
|
461 | 462 | def query_modifier(): |
|
462 | 463 | qry = query |
|
463 | 464 | return {'q': qry, 'type': 'commit'} |
|
464 | 465 | |
|
465 | 466 | label = u'Commit search for `{}`'.format(h.escape(query)) |
|
466 | 467 | commit_qry = { |
|
467 | 468 | 'id': -20, |
|
468 | 469 | 'value': query, |
|
469 | 470 | 'value_display': label, |
|
470 | 471 | 'value_icon': '<i class="icon-history"></i>', |
|
471 | 472 | 'type': 'search', |
|
472 | 473 | 'subtype': 'repo', |
|
473 | 474 | 'url': h.route_path('search_repo', |
|
474 | 475 | repo_name=repo_name, |
|
475 | 476 | _query=query_modifier()) |
|
476 | 477 | } |
|
477 | 478 | |
|
478 | 479 | if repo_context in ['commit', 'commits']: |
|
479 | 480 | queries.extend([commit_qry, file_qry]) |
|
480 | 481 | elif repo_context in ['files', 'summary']: |
|
481 | 482 | queries.extend([file_qry, commit_qry]) |
|
482 | 483 | else: |
|
483 | 484 | queries.extend([commit_qry, file_qry]) |
|
484 | 485 | |
|
485 | 486 | elif is_es_6 and repo_group_name: |
|
486 | 487 | # files |
|
487 | 488 | def query_modifier(): |
|
488 | 489 | qry = query |
|
489 | 490 | return {'q': qry, 'type': 'content'} |
|
490 | 491 | |
|
491 | 492 | label = u'File search for `{}`'.format(query) |
|
492 | 493 | file_qry = { |
|
493 | 494 | 'id': -30, |
|
494 | 495 | 'value': query, |
|
495 | 496 | 'value_display': label, |
|
496 | 497 | 'value_icon': '<i class="icon-code"></i>', |
|
497 | 498 | 'type': 'search', |
|
498 | 499 | 'subtype': 'repo_group', |
|
499 | 500 | 'url': h.route_path('search_repo_group', |
|
500 | 501 | repo_group_name=repo_group_name, |
|
501 | 502 | _query=query_modifier()) |
|
502 | 503 | } |
|
503 | 504 | |
|
504 | 505 | # commits |
|
505 | 506 | def query_modifier(): |
|
506 | 507 | qry = query |
|
507 | 508 | return {'q': qry, 'type': 'commit'} |
|
508 | 509 | |
|
509 | 510 | label = u'Commit search for `{}`'.format(query) |
|
510 | 511 | commit_qry = { |
|
511 | 512 | 'id': -40, |
|
512 | 513 | 'value': query, |
|
513 | 514 | 'value_display': label, |
|
514 | 515 | 'value_icon': '<i class="icon-history"></i>', |
|
515 | 516 | 'type': 'search', |
|
516 | 517 | 'subtype': 'repo_group', |
|
517 | 518 | 'url': h.route_path('search_repo_group', |
|
518 | 519 | repo_group_name=repo_group_name, |
|
519 | 520 | _query=query_modifier()) |
|
520 | 521 | } |
|
521 | 522 | |
|
522 | 523 | if repo_context in ['commit', 'commits']: |
|
523 | 524 | queries.extend([commit_qry, file_qry]) |
|
524 | 525 | elif repo_context in ['files', 'summary']: |
|
525 | 526 | queries.extend([file_qry, commit_qry]) |
|
526 | 527 | else: |
|
527 | 528 | queries.extend([commit_qry, file_qry]) |
|
528 | 529 | |
|
529 | 530 | # Global, not scoped |
|
530 | 531 | if not queries: |
|
531 | 532 | queries.append( |
|
532 | 533 | { |
|
533 | 534 | 'id': -1, |
|
534 | 535 | 'value': query, |
|
535 | 536 | 'value_display': u'File search for: `{}`'.format(query), |
|
536 | 537 | 'value_icon': '<i class="icon-code"></i>', |
|
537 | 538 | 'type': 'search', |
|
538 | 539 | 'subtype': 'global', |
|
539 | 540 | 'url': h.route_path('search', |
|
540 | 541 | _query={'q': query, 'type': 'content'}) |
|
541 | 542 | }) |
|
542 | 543 | queries.append( |
|
543 | 544 | { |
|
544 | 545 | 'id': -2, |
|
545 | 546 | 'value': query, |
|
546 | 547 | 'value_display': u'Commit search for: `{}`'.format(query), |
|
547 | 548 | 'value_icon': '<i class="icon-history"></i>', |
|
548 | 549 | 'type': 'search', |
|
549 | 550 | 'subtype': 'global', |
|
550 | 551 | 'url': h.route_path('search', |
|
551 | 552 | _query={'q': query, 'type': 'commit'}) |
|
552 | 553 | }) |
|
553 | 554 | |
|
554 | 555 | return queries |
|
555 | 556 | |
|
    @LoginRequired()
    @view_config(
        route_name='goto_switcher_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def goto_switcher_data(self):
        """
        XHR endpoint feeding the main goto-switcher filter box.

        Tries each specialized prefix search in order (user:, user_group:,
        commit:, file:) and stops at the first recognised prefix; when no
        prefix matched it falls back to default search suggestions plus
        repo-group and repo name matches.
        """
        c = self.load_default_context()

        _ = self.request.translate

        query = self.request.GET.get('query')
        log.debug('generating main filter data, query %s', query)

        res = []
        if not query:
            # empty query -> empty suggestion list, nothing to search for
            return {'suggestions': res}

        def no_match(name):
            # placeholder, non-clickable entry shown when a recognised
            # prefix produced no results
            return {
                'id': -1,
                'value': "",
                'value_display': name,
                'type': 'text',
                'url': ""
            }
        searcher = searcher_from_config(self.request.registry.settings)
        has_specialized_search = False

        # set repo context
        repo = None
        repo_id = safe_int(self.request.GET.get('search_context[repo_id]'))
        if repo_id:
            repo = Repository.get(repo_id)

        # set group context
        repo_group = None
        repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]'))
        if repo_group_id:
            repo_group = RepoGroup.get(repo_group_id)
        prefix_match = False

        # user: type search
        if not prefix_match:
            users, prefix_match = self._get_user_list(query)
            if users:
                has_specialized_search = True
                for serialized_user in users:
                    res.append(serialized_user)
            elif prefix_match:
                # prefix recognised but nothing matched
                has_specialized_search = True
                res.append(no_match('No matching users found'))

        # user_group: type search
        if not prefix_match:
            user_groups, prefix_match = self._get_user_groups_list(query)
            if user_groups:
                has_specialized_search = True
                for serialized_user_group in user_groups:
                    res.append(serialized_user_group)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching user groups found'))

        # FTS commit: type search
        if not prefix_match:
            commits, prefix_match = self._get_hash_commit_list(
                c.auth_user, searcher, query, repo, repo_group)
            if commits:
                has_specialized_search = True
                # group results per repository, preserving result order
                unique_repos = collections.OrderedDict()
                for commit in commits:
                    repo_name = commit['repo']
                    unique_repos.setdefault(repo_name, []).append(commit)

                for _repo, commits in unique_repos.items():
                    for commit in commits:
                        res.append(commit)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching commits found'))

        # FTS file: type search
        if not prefix_match:
            paths, prefix_match = self._get_path_list(
                c.auth_user, searcher, query, repo, repo_group)
            if paths:
                has_specialized_search = True
                # group results per repository, preserving result order
                unique_repos = collections.OrderedDict()
                for path in paths:
                    repo_name = path['repo']
                    unique_repos.setdefault(repo_name, []).append(path)

                # NOTE(review): this loop variable shadows the ``repo``
                # context object set above; harmless here because ``repo``
                # is not used again, but worth renaming at some point
                for repo, paths in unique_repos.items():
                    for path in paths:
                        res.append(path)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching files found'))

        # main suggestions
        if not has_specialized_search:
            repo_group_name = ''
            if repo_group:
                repo_group_name = repo_group.group_name

            for _q in self._get_default_search_queries(self.request.GET, searcher, query):
                res.append(_q)

            repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name)
            for serialized_repo_group in repo_groups:
                res.append(serialized_repo_group)

            repos = self._get_repo_list(query, repo_group_name=repo_group_name)
            for serialized_repo in repos:
                res.append(serialized_repo)

            if not repos and not repo_groups:
                res.append(no_match('No matches found'))

        return {'suggestions': res}
|
675 | 676 | |
|
676 | def _get_groups_and_repos(self, repo_group_id=None): | |
|
677 | # repo groups groups | |
|
678 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) | |
|
679 | _perms = ['group.read', 'group.write', 'group.admin'] | |
|
680 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) | |
|
681 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( | |
|
682 | repo_group_list=repo_group_list_acl, admin=False) | |
|
683 | ||
|
684 | # repositories | |
|
685 | repo_list = Repository.get_all_repos(group_id=repo_group_id) | |
|
686 | _perms = ['repository.read', 'repository.write', 'repository.admin'] | |
|
687 | repo_list_acl = RepoList(repo_list, perm_set=_perms) | |
|
688 | repo_data = RepoModel().get_repos_as_dict( | |
|
689 | repo_list=repo_list_acl, admin=False) | |
|
690 | ||
|
691 | return repo_data, repo_group_data | |
|
692 | ||
|
693 | 677 | @LoginRequired() |
|
694 | 678 | @view_config( |
|
695 | 679 | route_name='home', request_method='GET', |
|
696 | 680 | renderer='rhodecode:templates/index.mako') |
|
697 | 681 | def main_page(self): |
|
698 | 682 | c = self.load_default_context() |
|
699 | 683 | c.repo_group = None |
|
684 | return self._get_template_context(c) | |
|
700 | 685 | |
|
701 | repo_data, repo_group_data = self._get_groups_and_repos() | |
|
702 | # json used to render the grids | |
|
703 | c.repos_data = json.dumps(repo_data) | |
|
704 | c.repo_groups_data = json.dumps(repo_group_data) | |
|
686 | def _main_page_repo_groups_data(self, repo_group_id): | |
|
687 | column_map = { | |
|
688 | 'name_raw': 'group_name_hash', | |
|
689 | 'desc': 'group_description', | |
|
690 | 'last_change_raw': 'updated_on', | |
|
691 | 'owner': 'user_username', | |
|
692 | } | |
|
693 | draw, start, limit = self._extract_chunk(self.request) | |
|
694 | search_q, order_by, order_dir = self._extract_ordering( | |
|
695 | self.request, column_map=column_map) | |
|
696 | return RepoGroupModel().get_repo_groups_data_table( | |
|
697 | draw, start, limit, | |
|
698 | search_q, order_by, order_dir, | |
|
699 | self._rhodecode_user, repo_group_id) | |
|
700 | ||
|
701 | def _main_page_repos_data(self, repo_group_id): | |
|
702 | column_map = { | |
|
703 | 'name_raw': 'repo_name', | |
|
704 | 'desc': 'description', | |
|
705 | 'last_change_raw': 'updated_on', | |
|
706 | 'owner': 'user_username', | |
|
707 | } | |
|
708 | draw, start, limit = self._extract_chunk(self.request) | |
|
709 | search_q, order_by, order_dir = self._extract_ordering( | |
|
710 | self.request, column_map=column_map) | |
|
711 | return RepoModel().get_repos_data_table( | |
|
712 | draw, start, limit, | |
|
713 | search_q, order_by, order_dir, | |
|
714 | self._rhodecode_user, repo_group_id) | |
|
705 | 715 | |
|
706 | return self._get_template_context(c) | |
|
716 | @LoginRequired() | |
|
717 | @view_config( | |
|
718 | route_name='main_page_repo_groups_data', | |
|
719 | request_method='GET', renderer='json_ext', xhr=True) | |
|
720 | def main_page_repo_groups_data(self): | |
|
721 | self.load_default_context() | |
|
722 | repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | |
|
723 | ||
|
724 | if repo_group_id: | |
|
725 | group = RepoGroup.get_or_404(repo_group_id) | |
|
726 | _perms = ['group.read', 'group.write', 'group.admin'] | |
|
727 | if not HasRepoGroupPermissionAny(*_perms)( | |
|
728 | group.group_name, 'user is allowed to list repo group children'): | |
|
729 | raise HTTPNotFound() | |
|
730 | ||
|
731 | return self._main_page_repo_groups_data(repo_group_id) | |
|
732 | ||
|
733 | @LoginRequired() | |
|
734 | @view_config( | |
|
735 | route_name='main_page_repos_data', | |
|
736 | request_method='GET', renderer='json_ext', xhr=True) | |
|
737 | def main_page_repos_data(self): | |
|
738 | self.load_default_context() | |
|
739 | repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | |
|
740 | ||
|
741 | if repo_group_id: | |
|
742 | group = RepoGroup.get_or_404(repo_group_id) | |
|
743 | _perms = ['group.read', 'group.write', 'group.admin'] | |
|
744 | if not HasRepoGroupPermissionAny(*_perms)( | |
|
745 | group.group_name, 'user is allowed to list repo group children'): | |
|
746 | raise HTTPNotFound() | |
|
747 | ||
|
748 | return self._main_page_repos_data(repo_group_id) | |
|
707 | 749 | |
|
708 | 750 | @LoginRequired() |
|
709 | 751 | @HasRepoGroupPermissionAnyDecorator( |
|
710 | 752 | 'group.read', 'group.write', 'group.admin') |
|
711 | 753 | @view_config( |
|
712 | 754 | route_name='repo_group_home', request_method='GET', |
|
713 | 755 | renderer='rhodecode:templates/index_repo_group.mako') |
|
714 | 756 | @view_config( |
|
715 | 757 | route_name='repo_group_home_slash', request_method='GET', |
|
716 | 758 | renderer='rhodecode:templates/index_repo_group.mako') |
|
717 | 759 | def repo_group_main_page(self): |
|
718 | 760 | c = self.load_default_context() |
|
719 | 761 | c.repo_group = self.request.db_repo_group |
|
720 | repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) | |
|
721 | ||
|
722 | # update every 5 min | |
|
723 | if self.request.db_repo_group.last_commit_cache_update_diff > 60 * 5: | |
|
724 | self.request.db_repo_group.update_commit_cache() | |
|
725 | ||
|
726 | # json used to render the grids | |
|
727 | c.repos_data = json.dumps(repo_data) | |
|
728 | c.repo_groups_data = json.dumps(repo_group_data) | |
|
729 | ||
|
730 | 762 | return self._get_template_context(c) |
|
731 | 763 | |
|
732 | 764 | @LoginRequired() |
|
733 | 765 | @CSRFRequired() |
|
734 | 766 | @view_config( |
|
735 | 767 | route_name='markup_preview', request_method='POST', |
|
736 | 768 | renderer='string', xhr=True) |
|
737 | 769 | def markup_preview(self): |
|
738 | 770 | # Technically a CSRF token is not needed as no state changes with this |
|
739 | 771 | # call. However, as this is a POST is better to have it, so automated |
|
740 | 772 | # tools don't flag it as potential CSRF. |
|
741 | 773 | # Post is required because the payload could be bigger than the maximum |
|
742 | 774 | # allowed by GET. |
|
743 | 775 | |
|
744 | 776 | text = self.request.POST.get('text') |
|
745 | 777 | renderer = self.request.POST.get('renderer') or 'rst' |
|
746 | 778 | if text: |
|
747 | 779 | return h.render(text, renderer=renderer, mentions=True) |
|
748 | 780 | return '' |
|
749 | 781 | |
|
750 | 782 | @LoginRequired() |
|
751 | 783 | @CSRFRequired() |
|
752 | 784 | @view_config( |
|
753 | 785 | route_name='file_preview', request_method='POST', |
|
754 | 786 | renderer='string', xhr=True) |
|
755 | 787 | def file_preview(self): |
|
756 | 788 | # Technically a CSRF token is not needed as no state changes with this |
|
757 | 789 | # call. However, as this is a POST is better to have it, so automated |
|
758 | 790 | # tools don't flag it as potential CSRF. |
|
759 | 791 | # Post is required because the payload could be bigger than the maximum |
|
760 | 792 | # allowed by GET. |
|
761 | 793 | |
|
762 | 794 | text = self.request.POST.get('text') |
|
763 | 795 | file_path = self.request.POST.get('file_path') |
|
764 | 796 | |
|
765 | 797 | renderer = h.renderer_from_filename(file_path) |
|
766 | 798 | |
|
767 | 799 | if renderer: |
|
768 | 800 | return h.render(text, renderer=renderer, mentions=True) |
|
769 | 801 | else: |
|
770 | 802 | self.load_default_context() |
|
771 | 803 | _render = self.request.get_partial_renderer( |
|
772 | 804 | 'rhodecode:templates/files/file_content.mako') |
|
773 | 805 | |
|
774 | 806 | lines = filenode_as_lines_tokens(FileNode(file_path, text)) |
|
775 | 807 | |
|
776 | 808 | return _render('render_lines', lines) |
|
777 | 809 | |
|
778 | 810 | @LoginRequired() |
|
779 | 811 | @CSRFRequired() |
|
780 | 812 | @view_config( |
|
781 | 813 | route_name='store_user_session_value', request_method='POST', |
|
782 | 814 | renderer='string', xhr=True) |
|
783 | 815 | def store_user_session_attr(self): |
|
784 | 816 | key = self.request.POST.get('key') |
|
785 | 817 | val = self.request.POST.get('val') |
|
786 | 818 | |
|
787 | 819 | existing_value = self.request.session.get(key) |
|
788 | 820 | if existing_value != val: |
|
789 | 821 | self.request.session[key] = val |
|
790 | 822 | |
|
791 | 823 | return 'stored:{}:{}'.format(key, val) |
@@ -1,613 +1,615 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | The base Controller API |
|
23 | 23 | Provides the BaseController class for subclassing. And usage in different |
|
24 | 24 | controllers |
|
25 | 25 | """ |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | import socket |
|
29 | 29 | |
|
30 | 30 | import markupsafe |
|
31 | 31 | import ipaddress |
|
32 | 32 | |
|
33 | 33 | from paste.auth.basic import AuthBasicAuthenticator |
|
34 | 34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
35 | 35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
36 | 36 | |
|
37 | 37 | import rhodecode |
|
38 | 38 | from rhodecode.apps._base import TemplateArgs |
|
39 | 39 | from rhodecode.authentication.base import VCS_TYPE |
|
40 | 40 | from rhodecode.lib import auth, utils2 |
|
41 | 41 | from rhodecode.lib import helpers as h |
|
42 | 42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
43 | 43 | from rhodecode.lib.exceptions import UserCreationError |
|
44 | 44 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) |
|
45 | 45 | from rhodecode.lib.utils2 import ( |
|
46 | 46 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) |
|
47 | 47 | from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark |
|
48 | 48 | from rhodecode.model.notification import NotificationModel |
|
49 | 49 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def _filter_proxy(ip): |
|
55 | 55 | """ |
|
56 | 56 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
57 | 57 | ips. Those comma separated IPs are passed from various proxies in the |
|
58 | 58 | chain of request processing. The left-most being the original client. |
|
59 | 59 | We only care about the first IP which came from the org. client. |
|
60 | 60 | |
|
61 | 61 | :param ip: ip string from headers |
|
62 | 62 | """ |
|
63 | 63 | if ',' in ip: |
|
64 | 64 | _ips = ip.split(',') |
|
65 | 65 | _first_ip = _ips[0].strip() |
|
66 | 66 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
67 | 67 | return _first_ip |
|
68 | 68 | return ip |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | def _filter_port(ip): |
|
72 | 72 | """ |
|
73 | 73 | Removes a port from ip, there are 4 main cases to handle here. |
|
74 | 74 | - ipv4 eg. 127.0.0.1 |
|
75 | 75 | - ipv6 eg. ::1 |
|
76 | 76 | - ipv4+port eg. 127.0.0.1:8080 |
|
77 | 77 | - ipv6+port eg. [::1]:8080 |
|
78 | 78 | |
|
79 | 79 | :param ip: |
|
80 | 80 | """ |
|
81 | 81 | def is_ipv6(ip_addr): |
|
82 | 82 | if hasattr(socket, 'inet_pton'): |
|
83 | 83 | try: |
|
84 | 84 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
85 | 85 | except socket.error: |
|
86 | 86 | return False |
|
87 | 87 | else: |
|
88 | 88 | # fallback to ipaddress |
|
89 | 89 | try: |
|
90 | 90 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
91 | 91 | except Exception: |
|
92 | 92 | return False |
|
93 | 93 | return True |
|
94 | 94 | |
|
95 | 95 | if ':' not in ip: # must be ipv4 pure ip |
|
96 | 96 | return ip |
|
97 | 97 | |
|
98 | 98 | if '[' in ip and ']' in ip: # ipv6 with port |
|
99 | 99 | return ip.split(']')[0][1:].lower() |
|
100 | 100 | |
|
101 | 101 | # must be ipv6 or ipv4 with port |
|
102 | 102 | if is_ipv6(ip): |
|
103 | 103 | return ip |
|
104 | 104 | else: |
|
105 | 105 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
106 | 106 | return ip |
|
107 | 107 | |
|
108 | 108 | |
|
def get_ip_addr(environ):
    """
    Extract the client IP from the WSGI environ, preferring proxy headers
    (X-Real-IP, then X-Forwarded-For) over REMOTE_ADDR, and normalizing
    away proxy chains and port suffixes.
    """
    def _clean(raw):
        return _filter_port(_filter_proxy(raw))

    for header in ('HTTP_X_REAL_IP', 'HTTP_X_FORWARDED_FOR'):
        candidate = environ.get(header)
        if candidate:
            return _clean(candidate)

    return _clean(environ.get('REMOTE_ADDR', '0.0.0.0'))
|
125 | 125 | |
|
126 | 126 | |
|
def get_server_ip_addr(environ, log_errors=True):
    """
    Resolve the WSGI SERVER_NAME to an IP address; on any resolution
    failure return the raw hostname instead.

    :param log_errors: set False to silence the (expected-in-some-setups)
        lookup failure in the logs.
    """
    server_name = environ.get('SERVER_NAME')
    try:
        resolved = socket.gethostbyname(server_name)
    except Exception as exc:
        if log_errors:
            # in some cases this lookup is not possible, and we don't want to
            # make it an exception in logs
            log.exception('Could not retrieve server ip address: %s', exc)
        return server_name
    return resolved
|
137 | 137 | |
|
138 | 138 | |
|
def get_server_port(environ):
    """Return the WSGI ``SERVER_PORT`` value (a string), or None if unset."""
    return environ.get('SERVER_PORT')
|
141 | 141 | |
|
142 | 142 | |
|
def get_access_path(environ):
    """
    Return the request's PATH_INFO, preferring the environ of the original
    pylons request when one was stashed by the legacy middleware.
    """
    original = environ.get('pylons.original_request')
    if original:
        return original.environ.get('PATH_INFO')
    return environ.get('PATH_INFO')
|
149 | 149 | |
|
150 | 150 | |
|
def get_user_agent(environ):
    """Return the raw ``HTTP_USER_AGENT`` header value, or None if absent."""
    return environ.get('HTTP_USER_AGENT')
|
153 | 153 | |
|
154 | 154 | |
|
def vcs_operation_context(
        environ, repo_name, username, action, scm, check_locking=True,
        is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
    """
    Generate the context for a vcs operation, e.g. push or pull.

    This context is passed over the layers so that hooks triggered by the
    vcs operation know details like the user, the user's IP address etc.

    :param check_locking: Allows to switch of the computation of the locking
        data. This serves mainly the need of the simplevcs middleware to be
        able to disable this for certain operations.

    """
    # Tri-state value: False: unlock, None: nothing, True: lock
    make_lock = None
    locked_by = [None, None, None]

    user = User.get_by_username(username)
    is_anonymous = username == User.DEFAULT_USER
    if check_locking and not is_anonymous:
        log.debug('Checking locking on repository "%s"', repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        make_lock, __, locked_by = repo.get_locking_state(
            action, user.user_id)
    user_id = user.user_id

    settings_model = VcsSettingsModel(repo=repo_name)
    ui_settings = settings_model.get_ui_settings()

    # NOTE(marcink): This should be also in sync with
    # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
    root_entries = [entry for entry in ui_settings if entry.key == '/']
    repo_store = root_entries[0].value if root_entries else ''

    scm_data = {
        'ip': get_ip_addr(environ),
        'username': username,
        'user_id': user_id,
        'action': action,
        'repository': repo_name,
        'scm': scm,
        'config': rhodecode.CONFIG['__file__'],
        'repo_store': repo_store,
        'make_lock': make_lock,
        'locked_by': locked_by,
        'server_url': utils2.get_server_url(environ),
        'user_agent': get_user_agent(environ),
        'hooks': get_enabled_hook_classes(ui_settings),
        'is_shadow_repo': is_shadow_repo,
        'detect_force_push': detect_force_push,
        'check_branch_perms': check_branch_perms,
    }
    return scm_data
209 | 209 | |
|
210 | 210 | |
|
class BasicAuth(AuthBasicAuthenticator):
    """
    HTTP Basic authenticator used for VCS operations, with support for a
    configurable alternative HTTP status code returned after the first
    failed authentication attempt.
    """

    def __init__(self, realm, authfunc, registry, auth_http_code=None,
                 initial_call_detection=False, acl_repo_name=None):
        self.realm = realm
        self.initial_call = initial_call_detection
        self.authfunc = authfunc
        self.registry = registry
        self.acl_repo_name = acl_repo_name
        self._rc_auth_http_code = auth_http_code

    def _get_response_from_code(self, http_code):
        # map a configured numeric code onto a paste exception class;
        # anything unresolvable degrades to plain 403
        try:
            return get_exception(safe_int(http_code))
        except Exception:
            log.exception('Failed to fetch response for code %s', http_code)
            return HTTPForbidden

    def get_rc_realm(self):
        # realm as configured in the RhodeCode settings
        return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm'))

    def build_authentication(self):
        head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
        if self._rc_auth_http_code and not self.initial_call:
            # return alternative HTTP code if alternative http return code
            # is specified in RhodeCode config, but ONLY if it's not the
            # FIRST call
            response_klass = self._get_response_from_code(self._rc_auth_http_code)
            return response_klass(headers=head)
        return HTTPUnauthorized(headers=head)

    def authenticate(self, environ):
        authorization = AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication()

        authmeth, auth = authorization.split(' ', 1)
        if authmeth.lower() != 'basic':
            return self.build_authentication()

        auth = auth.strip().decode('base64')
        credentials = auth.split(':', 1)
        if len(credentials) == 2:
            username, password = credentials
            auth_data = self.authfunc(
                username, password, environ, VCS_TYPE,
                registry=self.registry, acl_repo_name=self.acl_repo_name)
            if auth_data:
                return {'username': username, 'auth_data': auth_data}
            if username and password:
                # we mark that we actually executed authentication once, at
                # that point we can use the alternative auth code
                self.initial_call = False

        return self.build_authentication()

    __call__ = authenticate
|
267 | 267 | |
|
268 | 268 | |
|
def calculate_version_hash(config):
    """
    Derive a short install-specific fingerprint: hashing the session
    secret together with the version keeps the value stable per install
    without leaking the raw version string.
    """
    secret = config.get('beaker.session.secret', '')
    return sha1(secret + rhodecode.__version__)[:8]
|
273 | 273 | |
|
274 | 274 | |
|
def get_current_lang(request):
    """
    Return the active language for *request*.

    First asks the legacy translation machinery for the active language;
    on any failure falls back to the request's ``_LOCALE_`` attribute, or
    its ``locale_name``.
    """
    # NOTE(marcink): remove after pyramid move
    try:
        return translation.get_lang()[0]
    except Exception:
        # `translation` may be unavailable here entirely; any failure simply
        # means we fall back to the request's own locale information. A bare
        # `except:` would also have swallowed KeyboardInterrupt/SystemExit.
        pass

    return getattr(request, '_LOCALE_', request.locale_name)
|
283 | 283 | |
|
284 | 284 | |
|
def attach_context_attributes(context, request, user_id=None):
    """
    Attach variables into template context called `c`.

    :param context: template-context object (exposed to templates as ``c``);
        mutated in place.
    :param request: current pyramid request; also receives the finished
        context as ``request.call_context``.
    :param user_id: optional user id used to load per-user data
        (unread notification count, bookmarks).
    """
    config = request.registry.settings

    rc_config = SettingsModel().get_all_settings(cache=True)
    context.rc_config = rc_config
    context.rhodecode_version = rhodecode.__version__
    context.rhodecode_edition = config.get('rhodecode.edition')
    # unique secret + version does not leak the version but keep consistency
    context.rhodecode_version_hash = calculate_version_hash(config)

    # Default language set for the incoming request
    context.language = get_current_lang(request)

    # Visual options
    context.visual = AttributeDict({})

    # DB stored Visual Items
    context.visual.show_public_icon = str2bool(
        rc_config.get('rhodecode_show_public_icon'))
    context.visual.show_private_icon = str2bool(
        rc_config.get('rhodecode_show_private_icon'))
    context.visual.stylify_metatags = str2bool(
        rc_config.get('rhodecode_stylify_metatags'))
    context.visual.dashboard_items = safe_int(
        rc_config.get('rhodecode_dashboard_items', 100))
    context.visual.admin_grid_items = safe_int(
        rc_config.get('rhodecode_admin_grid_items', 100))
    context.visual.show_revision_number = str2bool(
        rc_config.get('rhodecode_show_revision_number', True))
    context.visual.show_sha_length = safe_int(
        rc_config.get('rhodecode_show_sha_length', 100))
    context.visual.repository_fields = str2bool(
        rc_config.get('rhodecode_repository_fields'))
    context.visual.show_version = str2bool(
        rc_config.get('rhodecode_show_version'))
    context.visual.use_gravatar = str2bool(
        rc_config.get('rhodecode_use_gravatar'))
    context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
    context.visual.default_renderer = rc_config.get(
        'rhodecode_markup_renderer', 'rst')
    context.visual.comment_types = ChangesetComment.COMMENT_TYPES
    context.visual.rhodecode_support_url = \
        rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')

    context.visual.affected_files_cut_off = 60

    context.pre_code = rc_config.get('rhodecode_pre_code')
    context.post_code = rc_config.get('rhodecode_post_code')
    context.rhodecode_name = rc_config.get('rhodecode_title')
    context.default_encodings = aslist(config.get('default_encoding'), sep=',')
    # if we have specified default_encoding in the request, it has more
    # priority
    if request.GET.get('default_encoding'):
        context.default_encodings.insert(0, request.GET.get('default_encoding'))
    context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
    context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')

    # INI stored
    context.labs_active = str2bool(
        config.get('labs_settings_active', 'false'))
    context.ssh_enabled = str2bool(
        config.get('ssh.generate_authorized_keyfile', 'false'))
    context.ssh_key_generator_enabled = str2bool(
        config.get('ssh.enable_ui_key_generator', 'true'))

    context.visual.allow_repo_location_change = str2bool(
        config.get('allow_repo_location_change', True))
    context.visual.allow_custom_hooks_settings = str2bool(
        config.get('allow_custom_hooks_settings', True))
    context.debug_style = str2bool(config.get('debug_style', False))

    context.rhodecode_instanceid = config.get('instance_id')

    context.visual.cut_off_limit_diff = safe_int(
        config.get('cut_off_limit_diff'))
    context.visual.cut_off_limit_file = safe_int(
        config.get('cut_off_limit_file'))

    context.license = AttributeDict({})
    context.license.hide_license_info = str2bool(
        config.get('license.hide_license_info', False))

    # AppEnlight
    context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
    context.appenlight_api_public_key = config.get(
        'appenlight.api_public_key', '')
    context.appenlight_server_url = config.get('appenlight.server_url', '')

    # normalize the requested diff mode; anything unknown maps to None
    diffmode = {
        "unified": "unified",
        "sideside": "sideside"
    }.get(request.GET.get('diffmode'))

    is_api = hasattr(request, 'rpc_user')
    session_attrs = {
        # defaults
        "clone_url_format": "http",
        "diffmode": "sideside"
    }

    if not is_api:
        # don't access pyramid session for API calls
        if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
            request.session['rc_user_session_attr.diffmode'] = diffmode

        # session settings per user
        # copy 'rc_user_session_attr.*' keys into session_attrs, stripped
        # of their prefix
        for k, v in request.session.items():
            pref = 'rc_user_session_attr.'
            if k and k.startswith(pref):
                k = k[len(pref):]
                session_attrs[k] = v

    context.user_session_attrs = session_attrs

    # JS template context
    context.template_context = {
        'repo_name': None,
        'repo_type': None,
        'repo_landing_commit': None,
        'rhodecode_user': {
            'username': None,
            'email': None,
            'notification_status': False
        },
        'session_attrs': session_attrs,
        'visual': {
            'default_renderer': None
        },
        'commit_data': {
            'commit_id': None
        },
        'pull_request_data': {'pull_request_id': None},
        'timeago': {
            'refresh_time': 120 * 1000,
            'cutoff_limit': 1000 * 60 * 60 * 24 * 7
        },
        'pyramid_dispatch': {

        },
        'extra': {'plugins': {}}
    }
    # END CONFIG VARS
    if is_api:
        # API requests carry no session, hence no CSRF token
        csrf_token = None
    else:
        csrf_token = auth.get_csrf_token(session=request.session)

    context.csrf_token = csrf_token
    context.backends = rhodecode.BACKENDS.keys()
    context.backends.sort()
    unread_count = 0
    user_bookmark_list = []
    if user_id:
        unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
        user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
    context.unread_notifications = unread_count
    context.bookmark_items = user_bookmark_list

    # web case
    if hasattr(request, 'user'):
        context.auth_user = request.user
        context.rhodecode_user = request.user

    # api case
    if hasattr(request, 'rpc_user'):
        context.auth_user = request.rpc_user
        context.rhodecode_user = request.rpc_user

    # attach the whole call context to the request
    request.call_context = context
459 | 459 | |
|
460 | 460 | |
|
def get_auth_user(request):
    """
    Resolve the acting user for *request*.

    Token-based auth (``auth_token``/``api_key`` from the query string, or
    ``_auth_token`` from the route match) takes precedence; otherwise the
    cookie session is consulted.

    :return: tuple of ``(auth_user, auth_token)`` — the token is the empty
        string (or None from matchdict) when cookie auth was used.
    """
    environ = request.environ
    session = request.session

    ip_addr = get_ip_addr(environ)

    # make sure that we update permissions each time we call controller
    _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', ''))
    if not _auth_token and request.matchdict:
        url_auth_token = request.matchdict.get('_auth_token')
        _auth_token = url_auth_token
        if _auth_token:
            log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])

    if _auth_token:
        # when using API_KEY we assume user exists, and
        # doesn't need auth based on cookies.
        auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
        authenticated = False
    else:
        cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
        try:
            auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
                                 ip_addr=ip_addr)
        except UserCreationError as e:
            h.flash(e, 'error')
            # container auth or other auth functions that create users
            # on the fly can throw this exception signaling that there's
            # issue with user creation, explanation should be provided
            # in Exception itself. We then create a simple blank
            # AuthUser
            auth_user = AuthUser(ip_addr=ip_addr)

        # in case someone changes a password for user it triggers session
        # flush and forces a re-login
        if password_changed(auth_user, session):
            session.invalidate()
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
            auth_user = AuthUser(ip_addr=ip_addr)

        authenticated = cookie_store.get('is_authenticated')

    if not auth_user.is_authenticated and auth_user.is_user_object:
        # user is not authenticated and not empty
        auth_user.set_authenticated(authenticated)

    return auth_user, _auth_token
507 | 507 | return auth_user, _auth_token |
|
508 | 508 | |
|
509 | 509 | |
|
def h_filter(s):
    """
    Custom filter for Mako templates. Mako by standard uses `markupsafe.escape`
    we wrap this with additional functionality that converts None to empty
    strings
    """
    # None would otherwise render as the literal string 'None'
    return markupsafe.Markup() if s is None else markupsafe.escape(s)
|
519 | 519 | |
|
520 | 520 | |
|
521 | 521 | def add_events_routes(config): |
|
522 | 522 | """ |
|
523 | 523 | Adds routing that can be used in events. Because some events are triggered |
|
524 | 524 | outside of pyramid context, we need to bootstrap request with some |
|
525 | 525 | routing registered |
|
526 | 526 | """ |
|
527 | 527 | |
|
528 | 528 | from rhodecode.apps._base import ADMIN_PREFIX |
|
529 | 529 | |
|
530 | 530 | config.add_route(name='home', pattern='/') |
|
531 | config.add_route(name='main_page_repos_data', pattern='/_home_repos') | |
|
532 | config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups') | |
|
531 | 533 | |
|
532 | 534 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') |
|
533 | 535 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') |
|
534 | 536 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
535 | 537 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
536 | 538 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
537 | 539 | |
|
538 | 540 | config.add_route(name='pullrequest_show', |
|
539 | 541 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
540 | 542 | config.add_route(name='pull_requests_global', |
|
541 | 543 | pattern='/pull-request/{pull_request_id}') |
|
542 | 544 | |
|
543 | 545 | config.add_route(name='repo_commit', |
|
544 | 546 | pattern='/{repo_name}/changeset/{commit_id}') |
|
545 | 547 | config.add_route(name='repo_files', |
|
546 | 548 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
547 | 549 | |
|
548 | 550 | config.add_route(name='hovercard_user', |
|
549 | 551 | pattern='/_hovercard/user/{user_id}') |
|
550 | 552 | |
|
551 | 553 | config.add_route(name='hovercard_user_group', |
|
552 | 554 | pattern='/_hovercard/user_group/{user_group_id}') |
|
553 | 555 | |
|
554 | 556 | config.add_route(name='hovercard_pull_request', |
|
555 | 557 | pattern='/_hovercard/pull_request/{pull_request_id}') |
|
556 | 558 | |
|
557 | 559 | config.add_route(name='hovercard_repo_commit', |
|
558 | 560 | pattern='/_hovercard/commit/{repo_name}/{commit_id}') |
|
559 | 561 | |
|
560 | 562 | |
|
561 | 563 | def bootstrap_config(request): |
|
562 | 564 | import pyramid.testing |
|
563 | 565 | registry = pyramid.testing.Registry('RcTestRegistry') |
|
564 | 566 | |
|
565 | 567 | config = pyramid.testing.setUp(registry=registry, request=request) |
|
566 | 568 | |
|
567 | 569 | # allow pyramid lookup in testing |
|
568 | 570 | config.include('pyramid_mako') |
|
569 | 571 | config.include('rhodecode.lib.rc_beaker') |
|
570 | 572 | config.include('rhodecode.lib.rc_cache') |
|
571 | 573 | |
|
572 | 574 | add_events_routes(config) |
|
573 | 575 | |
|
574 | 576 | return config |
|
575 | 577 | |
|
576 | 578 | |
|
577 | 579 | def bootstrap_request(**kwargs): |
|
578 | 580 | import pyramid.testing |
|
579 | 581 | |
|
580 | 582 | class TestRequest(pyramid.testing.DummyRequest): |
|
581 | 583 | application_url = kwargs.pop('application_url', 'http://example.com') |
|
582 | 584 | host = kwargs.pop('host', 'example.com:80') |
|
583 | 585 | domain = kwargs.pop('domain', 'example.com') |
|
584 | 586 | |
|
585 | 587 | def translate(self, msg): |
|
586 | 588 | return msg |
|
587 | 589 | |
|
588 | 590 | def plularize(self, singular, plural, n): |
|
589 | 591 | return singular |
|
590 | 592 | |
|
591 | 593 | def get_partial_renderer(self, tmpl_name): |
|
592 | 594 | |
|
593 | 595 | from rhodecode.lib.partial_renderer import get_partial_renderer |
|
594 | 596 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) |
|
595 | 597 | |
|
596 | 598 | _call_context = TemplateArgs() |
|
597 | 599 | _call_context.visual = TemplateArgs() |
|
598 | 600 | _call_context.visual.show_sha_length = 12 |
|
599 | 601 | _call_context.visual.show_revision_number = True |
|
600 | 602 | |
|
601 | 603 | @property |
|
602 | 604 | def call_context(self): |
|
603 | 605 | return self._call_context |
|
604 | 606 | |
|
605 | 607 | class TestDummySession(pyramid.testing.DummySession): |
|
606 | 608 | def save(*arg, **kw): |
|
607 | 609 | pass |
|
608 | 610 | |
|
609 | 611 | request = TestRequest(**kwargs) |
|
610 | 612 | request.session = TestDummySession() |
|
611 | 613 | |
|
612 | 614 | return request |
|
613 | 615 |
@@ -1,1079 +1,1160 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import shutil |
|
24 | 24 | import time |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import datetime |
|
28 | 28 | |
|
29 | 29 | from pyramid.threadlocal import get_current_request |
|
30 | 30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
34 | 34 | from rhodecode.lib.caching_query import FromCache |
|
35 | 35 | from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError |
|
36 | 36 | from rhodecode.lib.hooks_base import log_delete_repository |
|
37 | 37 | from rhodecode.lib.user_log_filter import user_log_filter |
|
38 | 38 | from rhodecode.lib.utils import make_db_config |
|
39 | 39 | from rhodecode.lib.utils2 import ( |
|
40 | 40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
41 | 41 | get_current_rhodecode_user, safe_int, datetime_to_time, |
|
42 | 42 | action_logger_generic) |
|
43 | 43 | from rhodecode.lib.vcs.backends import get_backend |
|
44 | 44 | from rhodecode.model import BaseModel |
|
45 | 45 | from rhodecode.model.db import ( |
|
46 | 46 | _hash_key, func, case, joinedload, or_, in_filter_generator, |
|
47 | 47 | Session, Repository, UserRepoToPerm, UserGroupRepoToPerm, |
|
48 | 48 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, |
|
49 | 49 | Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) |
|
50 | 50 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 51 | |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | class RepoModel(BaseModel): |
|
56 | 56 | |
|
57 | 57 | cls = Repository |
|
58 | 58 | |
|
59 | 59 | def _get_user_group(self, users_group): |
|
60 | 60 | return self._get_instance(UserGroup, users_group, |
|
61 | 61 | callback=UserGroup.get_by_group_name) |
|
62 | 62 | |
|
63 | 63 | def _get_repo_group(self, repo_group): |
|
64 | 64 | return self._get_instance(RepoGroup, repo_group, |
|
65 | 65 | callback=RepoGroup.get_by_group_name) |
|
66 | 66 | |
|
67 | 67 | def _create_default_perms(self, repository, private): |
|
68 | 68 | # create default permission |
|
69 | 69 | default = 'repository.read' |
|
70 | 70 | def_user = User.get_default_user() |
|
71 | 71 | for p in def_user.user_perms: |
|
72 | 72 | if p.permission.permission_name.startswith('repository.'): |
|
73 | 73 | default = p.permission.permission_name |
|
74 | 74 | break |
|
75 | 75 | |
|
76 | 76 | default_perm = 'repository.none' if private else default |
|
77 | 77 | |
|
78 | 78 | repo_to_perm = UserRepoToPerm() |
|
79 | 79 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
80 | 80 | |
|
81 | 81 | repo_to_perm.repository = repository |
|
82 | 82 | repo_to_perm.user_id = def_user.user_id |
|
83 | 83 | |
|
84 | 84 | return repo_to_perm |
|
85 | 85 | |
|
86 | 86 | @LazyProperty |
|
87 | 87 | def repos_path(self): |
|
88 | 88 | """ |
|
89 | 89 | Gets the repositories root path from database |
|
90 | 90 | """ |
|
91 | 91 | settings_model = VcsSettingsModel(sa=self.sa) |
|
92 | 92 | return settings_model.get_repos_location() |
|
93 | 93 | |
|
94 | 94 | def get(self, repo_id): |
|
95 | 95 | repo = self.sa.query(Repository) \ |
|
96 | 96 | .filter(Repository.repo_id == repo_id) |
|
97 | 97 | |
|
98 | 98 | return repo.scalar() |
|
99 | 99 | |
|
100 | 100 | def get_repo(self, repository): |
|
101 | 101 | return self._get_repo(repository) |
|
102 | 102 | |
|
103 | 103 | def get_by_repo_name(self, repo_name, cache=False): |
|
104 | 104 | repo = self.sa.query(Repository) \ |
|
105 | 105 | .filter(Repository.repo_name == repo_name) |
|
106 | 106 | |
|
107 | 107 | if cache: |
|
108 | 108 | name_key = _hash_key(repo_name) |
|
109 | 109 | repo = repo.options( |
|
110 | 110 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
111 | 111 | return repo.scalar() |
|
112 | 112 | |
|
113 | 113 | def _extract_id_from_repo_name(self, repo_name): |
|
114 | 114 | if repo_name.startswith('/'): |
|
115 | 115 | repo_name = repo_name.lstrip('/') |
|
116 | 116 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
117 | 117 | if by_id_match: |
|
118 | 118 | return by_id_match.groups()[0] |
|
119 | 119 | |
|
120 | 120 | def get_repo_by_id(self, repo_name): |
|
121 | 121 | """ |
|
122 | 122 | Extracts repo_name by id from special urls. |
|
123 | 123 | Example url is _11/repo_name |
|
124 | 124 | |
|
125 | 125 | :param repo_name: |
|
126 | 126 | :return: repo object if matched else None |
|
127 | 127 | """ |
|
128 | 128 | |
|
129 | 129 | try: |
|
130 | 130 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
131 | 131 | if _repo_id: |
|
132 | 132 | return self.get(_repo_id) |
|
133 | 133 | except Exception: |
|
134 | 134 | log.exception('Failed to extract repo_name from URL') |
|
135 | 135 | |
|
136 | 136 | return None |
|
137 | 137 | |
|
138 | 138 | def get_repos_for_root(self, root, traverse=False): |
|
139 | 139 | if traverse: |
|
140 | 140 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
141 | 141 | repos = Repository.query().filter( |
|
142 | 142 | Repository.repo_name.like(like_expression)).all() |
|
143 | 143 | else: |
|
144 | 144 | if root and not isinstance(root, RepoGroup): |
|
145 | 145 | raise ValueError( |
|
146 | 146 | 'Root must be an instance ' |
|
147 | 147 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
148 | 148 | repos = Repository.query().filter(Repository.group == root).all() |
|
149 | 149 | return repos |
|
150 | 150 | |
|
151 | 151 | def get_url(self, repo, request=None, permalink=False): |
|
152 | 152 | if not request: |
|
153 | 153 | request = get_current_request() |
|
154 | 154 | |
|
155 | 155 | if not request: |
|
156 | 156 | return |
|
157 | 157 | |
|
158 | 158 | if permalink: |
|
159 | 159 | return request.route_url( |
|
160 | 160 | 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id))) |
|
161 | 161 | else: |
|
162 | 162 | return request.route_url( |
|
163 | 163 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
164 | 164 | |
|
165 | 165 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
166 | 166 | if not request: |
|
167 | 167 | request = get_current_request() |
|
168 | 168 | |
|
169 | 169 | if not request: |
|
170 | 170 | return |
|
171 | 171 | |
|
172 | 172 | if permalink: |
|
173 | 173 | return request.route_url( |
|
174 | 174 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
175 | 175 | commit_id=commit_id) |
|
176 | 176 | |
|
177 | 177 | else: |
|
178 | 178 | return request.route_url( |
|
179 | 179 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
180 | 180 | commit_id=commit_id) |
|
181 | 181 | |
|
182 | 182 | def get_repo_log(self, repo, filter_term): |
|
183 | 183 | repo_log = UserLog.query()\ |
|
184 | 184 | .filter(or_(UserLog.repository_id == repo.repo_id, |
|
185 | 185 | UserLog.repository_name == repo.repo_name))\ |
|
186 | 186 | .options(joinedload(UserLog.user))\ |
|
187 | 187 | .options(joinedload(UserLog.repository))\ |
|
188 | 188 | .order_by(UserLog.action_date.desc()) |
|
189 | 189 | |
|
190 | 190 | repo_log = user_log_filter(repo_log, filter_term) |
|
191 | 191 | return repo_log |
|
192 | 192 | |
|
193 | 193 | @classmethod |
|
194 | 194 | def update_commit_cache(cls, repositories=None): |
|
195 | 195 | if not repositories: |
|
196 | 196 | repositories = Repository.getAll() |
|
197 | 197 | for repo in repositories: |
|
198 | 198 | repo.update_commit_cache() |
|
199 | 199 | |
|
200 | 200 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
201 | 201 | super_user_actions=False, short_name=None): |
|
202 | 202 | _render = get_current_request().get_partial_renderer( |
|
203 | 203 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
204 | 204 | c = _render.get_call_context() |
|
205 | 205 | |
|
206 | 206 | def quick_menu(repo_name): |
|
207 | 207 | return _render('quick_menu', repo_name) |
|
208 | 208 | |
|
209 | 209 | def repo_lnk(name, rtype, rstate, private, archived, fork_of): |
|
210 | 210 | if short_name is not None: |
|
211 | 211 | short_name_var = short_name |
|
212 | 212 | else: |
|
213 | 213 | short_name_var = not admin |
|
214 | 214 | return _render('repo_name', name, rtype, rstate, private, archived, fork_of, |
|
215 | 215 | short_name=short_name_var, admin=False) |
|
216 | 216 | |
|
217 | 217 | def last_change(last_change): |
|
218 | 218 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
219 | 219 | ts = time.time() |
|
220 | 220 | utc_offset = (datetime.datetime.fromtimestamp(ts) |
|
221 | 221 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() |
|
222 | 222 | last_change = last_change + datetime.timedelta(seconds=utc_offset) |
|
223 | 223 | |
|
224 | 224 | return _render("last_change", last_change) |
|
225 | 225 | |
|
226 | 226 | def rss_lnk(repo_name): |
|
227 | 227 | return _render("rss", repo_name) |
|
228 | 228 | |
|
229 | 229 | def atom_lnk(repo_name): |
|
230 | 230 | return _render("atom", repo_name) |
|
231 | 231 | |
|
232 | 232 | def last_rev(repo_name, cs_cache): |
|
233 | 233 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
234 | 234 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
235 | 235 | cs_cache.get('message'), cs_cache.get('date')) |
|
236 | 236 | |
|
237 | 237 | def desc(desc): |
|
238 | 238 | return _render('repo_desc', desc, c.visual.stylify_metatags) |
|
239 | 239 | |
|
240 | 240 | def state(repo_state): |
|
241 | 241 | return _render("repo_state", repo_state) |
|
242 | 242 | |
|
243 | 243 | def repo_actions(repo_name): |
|
244 | 244 | return _render('repo_actions', repo_name, super_user_actions) |
|
245 | 245 | |
|
246 | 246 | def user_profile(username): |
|
247 | 247 | return _render('user_profile', username) |
|
248 | 248 | |
|
249 | 249 | repos_data = [] |
|
250 | 250 | for repo in repo_list: |
|
251 | 251 | # NOTE(marcink): because we use only raw column we need to load it like that |
|
252 | 252 | changeset_cache = Repository._load_changeset_cache( |
|
253 | 253 | repo.repo_id, repo._changeset_cache) |
|
254 | 254 | last_commit_change = Repository._load_commit_change(changeset_cache) |
|
255 | 255 | |
|
256 | 256 | row = { |
|
257 | 257 | "menu": quick_menu(repo.repo_name), |
|
258 | 258 | |
|
259 | 259 | "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state, |
|
260 | 260 | repo.private, repo.archived, repo.fork), |
|
261 | 261 | "name_raw": repo.repo_name.lower(), |
|
262 | 262 | "desc": desc(repo.description), |
|
263 | 263 | |
|
264 | 264 | "last_change": last_change(last_commit_change), |
|
265 | 265 | "last_change_raw": datetime_to_time(last_commit_change), |
|
266 | 266 | |
|
267 | 267 | "last_changeset": last_rev(repo.repo_name, changeset_cache), |
|
268 | 268 | "last_changeset_raw": changeset_cache.get('revision'), |
|
269 | 269 | |
|
270 | 270 | "owner": user_profile(repo.User.username), |
|
271 | 271 | |
|
272 | 272 | "state": state(repo.repo_state), |
|
273 | 273 | "rss": rss_lnk(repo.repo_name), |
|
274 | 274 | "atom": atom_lnk(repo.repo_name), |
|
275 | 275 | } |
|
276 | 276 | if admin: |
|
277 | 277 | row.update({ |
|
278 | 278 | "action": repo_actions(repo.repo_name), |
|
279 | 279 | }) |
|
280 | 280 | repos_data.append(row) |
|
281 | 281 | |
|
282 | 282 | return repos_data |
|
283 | 283 | |
|
284 | def get_repos_data_table( | |
|
285 | self, draw, start, limit, | |
|
286 | search_q, order_by, order_dir, | |
|
287 | auth_user, repo_group_id): | |
|
288 | from rhodecode.model.scm import RepoList | |
|
289 | ||
|
290 | _perms = ['repository.read', 'repository.write', 'repository.admin'] | |
|
291 | ||
|
292 | repos = Repository.query() \ | |
|
293 | .filter(Repository.group_id == repo_group_id) \ | |
|
294 | .all() | |
|
295 | auth_repo_list = RepoList( | |
|
296 | repos, perm_set=_perms, | |
|
297 | extra_kwargs=dict(user=auth_user)) | |
|
298 | ||
|
299 | allowed_ids = [-1] | |
|
300 | for repo in auth_repo_list: | |
|
301 | allowed_ids.append(repo.repo_id) | |
|
302 | ||
|
303 | repos_data_total_count = Repository.query() \ | |
|
304 | .filter(Repository.group_id == repo_group_id) \ | |
|
305 | .filter(or_( | |
|
306 | # generate multiple IN to fix limitation problems | |
|
307 | *in_filter_generator(Repository.repo_id, allowed_ids)) | |
|
308 | ) \ | |
|
309 | .count() | |
|
310 | ||
|
311 | base_q = Session.query( | |
|
312 | Repository.repo_id, | |
|
313 | Repository.repo_name, | |
|
314 | Repository.description, | |
|
315 | Repository.repo_type, | |
|
316 | Repository.repo_state, | |
|
317 | Repository.private, | |
|
318 | Repository.archived, | |
|
319 | Repository.fork, | |
|
320 | Repository.updated_on, | |
|
321 | Repository._changeset_cache, | |
|
322 | User, | |
|
323 | ) \ | |
|
324 | .filter(Repository.group_id == repo_group_id) \ | |
|
325 | .filter(or_( | |
|
326 | # generate multiple IN to fix limitation problems | |
|
327 | *in_filter_generator(Repository.repo_id, allowed_ids)) | |
|
328 | ) \ | |
|
329 | .join(User, User.user_id == Repository.user_id) \ | |
|
330 | .group_by(Repository, User) | |
|
331 | ||
|
332 | repos_data_total_filtered_count = base_q.count() | |
|
333 | ||
|
334 | sort_defined = False | |
|
335 | if order_by == 'repo_name': | |
|
336 | sort_col = func.lower(Repository.repo_name) | |
|
337 | sort_defined = True | |
|
338 | elif order_by == 'user_username': | |
|
339 | sort_col = User.username | |
|
340 | else: | |
|
341 | sort_col = getattr(Repository, order_by, None) | |
|
342 | ||
|
343 | if sort_defined or sort_col: | |
|
344 | if order_dir == 'asc': | |
|
345 | sort_col = sort_col.asc() | |
|
346 | else: | |
|
347 | sort_col = sort_col.desc() | |
|
348 | ||
|
349 | base_q = base_q.order_by(sort_col) | |
|
350 | base_q = base_q.offset(start).limit(limit) | |
|
351 | ||
|
352 | repos_list = base_q.all() | |
|
353 | ||
|
354 | repos_data = RepoModel().get_repos_as_dict( | |
|
355 | repo_list=repos_list, admin=False) | |
|
356 | ||
|
357 | data = ({ | |
|
358 | 'draw': draw, | |
|
359 | 'data': repos_data, | |
|
360 | 'recordsTotal': repos_data_total_count, | |
|
361 | 'recordsFiltered': repos_data_total_filtered_count, | |
|
362 | }) | |
|
363 | return data | |
|
364 | ||
|
284 | 365 | def _get_defaults(self, repo_name): |
|
285 | 366 | """ |
|
286 | 367 | Gets information about repository, and returns a dict for |
|
287 | 368 | usage in forms |
|
288 | 369 | |
|
289 | 370 | :param repo_name: |
|
290 | 371 | """ |
|
291 | 372 | |
|
292 | 373 | repo_info = Repository.get_by_repo_name(repo_name) |
|
293 | 374 | |
|
294 | 375 | if repo_info is None: |
|
295 | 376 | return None |
|
296 | 377 | |
|
297 | 378 | defaults = repo_info.get_dict() |
|
298 | 379 | defaults['repo_name'] = repo_info.just_name |
|
299 | 380 | |
|
300 | 381 | groups = repo_info.groups_with_parents |
|
301 | 382 | parent_group = groups[-1] if groups else None |
|
302 | 383 | |
|
303 | 384 | # we use -1 as this is how in HTML, we mark an empty group |
|
304 | 385 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
305 | 386 | |
|
306 | 387 | keys_to_process = ( |
|
307 | 388 | {'k': 'repo_type', 'strip': False}, |
|
308 | 389 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
309 | 390 | {'k': 'repo_description', 'strip': True}, |
|
310 | 391 | {'k': 'repo_enable_locking', 'strip': True}, |
|
311 | 392 | {'k': 'repo_landing_rev', 'strip': True}, |
|
312 | 393 | {'k': 'clone_uri', 'strip': False}, |
|
313 | 394 | {'k': 'push_uri', 'strip': False}, |
|
314 | 395 | {'k': 'repo_private', 'strip': True}, |
|
315 | 396 | {'k': 'repo_enable_statistics', 'strip': True} |
|
316 | 397 | ) |
|
317 | 398 | |
|
318 | 399 | for item in keys_to_process: |
|
319 | 400 | attr = item['k'] |
|
320 | 401 | if item['strip']: |
|
321 | 402 | attr = remove_prefix(item['k'], 'repo_') |
|
322 | 403 | |
|
323 | 404 | val = defaults[attr] |
|
324 | 405 | if item['k'] == 'repo_landing_rev': |
|
325 | 406 | val = ':'.join(defaults[attr]) |
|
326 | 407 | defaults[item['k']] = val |
|
327 | 408 | if item['k'] == 'clone_uri': |
|
328 | 409 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
329 | 410 | if item['k'] == 'push_uri': |
|
330 | 411 | defaults['push_uri_hidden'] = repo_info.push_uri_hidden |
|
331 | 412 | |
|
332 | 413 | # fill owner |
|
333 | 414 | if repo_info.user: |
|
334 | 415 | defaults.update({'user': repo_info.user.username}) |
|
335 | 416 | else: |
|
336 | 417 | replacement_user = User.get_first_super_admin().username |
|
337 | 418 | defaults.update({'user': replacement_user}) |
|
338 | 419 | |
|
339 | 420 | return defaults |
|
340 | 421 | |
|
341 | 422 | def update(self, repo, **kwargs): |
|
342 | 423 | try: |
|
343 | 424 | cur_repo = self._get_repo(repo) |
|
344 | 425 | source_repo_name = cur_repo.repo_name |
|
345 | 426 | if 'user' in kwargs: |
|
346 | 427 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
347 | 428 | |
|
348 | 429 | if 'repo_group' in kwargs: |
|
349 | 430 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
350 | 431 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
351 | 432 | |
|
352 | 433 | update_keys = [ |
|
353 | 434 | (1, 'repo_description'), |
|
354 | 435 | (1, 'repo_landing_rev'), |
|
355 | 436 | (1, 'repo_private'), |
|
356 | 437 | (1, 'repo_enable_downloads'), |
|
357 | 438 | (1, 'repo_enable_locking'), |
|
358 | 439 | (1, 'repo_enable_statistics'), |
|
359 | 440 | (0, 'clone_uri'), |
|
360 | 441 | (0, 'push_uri'), |
|
361 | 442 | (0, 'fork_id') |
|
362 | 443 | ] |
|
363 | 444 | for strip, k in update_keys: |
|
364 | 445 | if k in kwargs: |
|
365 | 446 | val = kwargs[k] |
|
366 | 447 | if strip: |
|
367 | 448 | k = remove_prefix(k, 'repo_') |
|
368 | 449 | |
|
369 | 450 | setattr(cur_repo, k, val) |
|
370 | 451 | |
|
371 | 452 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
372 | 453 | cur_repo.repo_name = new_name |
|
373 | 454 | |
|
374 | 455 | # if private flag is set, reset default permission to NONE |
|
375 | 456 | if kwargs.get('repo_private'): |
|
376 | 457 | EMPTY_PERM = 'repository.none' |
|
377 | 458 | RepoModel().grant_user_permission( |
|
378 | 459 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
379 | 460 | ) |
|
380 | 461 | |
|
381 | 462 | # handle extra fields |
|
382 | 463 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs): |
|
383 | 464 | k = RepositoryField.un_prefix_key(field) |
|
384 | 465 | ex_field = RepositoryField.get_by_key_name( |
|
385 | 466 | key=k, repo=cur_repo) |
|
386 | 467 | if ex_field: |
|
387 | 468 | ex_field.field_value = kwargs[field] |
|
388 | 469 | self.sa.add(ex_field) |
|
389 | 470 | |
|
390 | 471 | self.sa.add(cur_repo) |
|
391 | 472 | |
|
392 | 473 | if source_repo_name != new_name: |
|
393 | 474 | # rename repository |
|
394 | 475 | self._rename_filesystem_repo( |
|
395 | 476 | old=source_repo_name, new=new_name) |
|
396 | 477 | |
|
397 | 478 | return cur_repo |
|
398 | 479 | except Exception: |
|
399 | 480 | log.error(traceback.format_exc()) |
|
400 | 481 | raise |
|
401 | 482 | |
|
402 | 483 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
403 | 484 | private=False, clone_uri=None, repo_group=None, |
|
404 | 485 | landing_rev='rev:tip', fork_of=None, |
|
405 | 486 | copy_fork_permissions=False, enable_statistics=False, |
|
406 | 487 | enable_locking=False, enable_downloads=False, |
|
407 | 488 | copy_group_permissions=False, |
|
408 | 489 | state=Repository.STATE_PENDING): |
|
409 | 490 | """ |
|
410 | 491 | Create repository inside database with PENDING state, this should be |
|
411 | 492 | only executed by create() repo. With exception of importing existing |
|
412 | 493 | repos |
|
413 | 494 | """ |
|
414 | 495 | from rhodecode.model.scm import ScmModel |
|
415 | 496 | |
|
416 | 497 | owner = self._get_user(owner) |
|
417 | 498 | fork_of = self._get_repo(fork_of) |
|
418 | 499 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
419 | 500 | |
|
420 | 501 | try: |
|
421 | 502 | repo_name = safe_unicode(repo_name) |
|
422 | 503 | description = safe_unicode(description) |
|
423 | 504 | # repo name is just a name of repository |
|
424 | 505 | # while repo_name_full is a full qualified name that is combined |
|
425 | 506 | # with name and path of group |
|
426 | 507 | repo_name_full = repo_name |
|
427 | 508 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
428 | 509 | |
|
429 | 510 | new_repo = Repository() |
|
430 | 511 | new_repo.repo_state = state |
|
431 | 512 | new_repo.enable_statistics = False |
|
432 | 513 | new_repo.repo_name = repo_name_full |
|
433 | 514 | new_repo.repo_type = repo_type |
|
434 | 515 | new_repo.user = owner |
|
435 | 516 | new_repo.group = repo_group |
|
436 | 517 | new_repo.description = description or repo_name |
|
437 | 518 | new_repo.private = private |
|
438 | 519 | new_repo.archived = False |
|
439 | 520 | new_repo.clone_uri = clone_uri |
|
440 | 521 | new_repo.landing_rev = landing_rev |
|
441 | 522 | |
|
442 | 523 | new_repo.enable_statistics = enable_statistics |
|
443 | 524 | new_repo.enable_locking = enable_locking |
|
444 | 525 | new_repo.enable_downloads = enable_downloads |
|
445 | 526 | |
|
446 | 527 | if repo_group: |
|
447 | 528 | new_repo.enable_locking = repo_group.enable_locking |
|
448 | 529 | |
|
449 | 530 | if fork_of: |
|
450 | 531 | parent_repo = fork_of |
|
451 | 532 | new_repo.fork = parent_repo |
|
452 | 533 | |
|
453 | 534 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
454 | 535 | |
|
455 | 536 | self.sa.add(new_repo) |
|
456 | 537 | |
|
457 | 538 | EMPTY_PERM = 'repository.none' |
|
458 | 539 | if fork_of and copy_fork_permissions: |
|
459 | 540 | repo = fork_of |
|
460 | 541 | user_perms = UserRepoToPerm.query() \ |
|
461 | 542 | .filter(UserRepoToPerm.repository == repo).all() |
|
462 | 543 | group_perms = UserGroupRepoToPerm.query() \ |
|
463 | 544 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
464 | 545 | |
|
465 | 546 | for perm in user_perms: |
|
466 | 547 | UserRepoToPerm.create( |
|
467 | 548 | perm.user, new_repo, perm.permission) |
|
468 | 549 | |
|
469 | 550 | for perm in group_perms: |
|
470 | 551 | UserGroupRepoToPerm.create( |
|
471 | 552 | perm.users_group, new_repo, perm.permission) |
|
472 | 553 | # in case we copy permissions and also set this repo to private |
|
473 | 554 | # override the default user permission to make it a private repo |
|
474 | 555 | if private: |
|
475 | 556 | RepoModel(self.sa).grant_user_permission( |
|
476 | 557 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
477 | 558 | |
|
478 | 559 | elif repo_group and copy_group_permissions: |
|
479 | 560 | user_perms = UserRepoGroupToPerm.query() \ |
|
480 | 561 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
481 | 562 | |
|
482 | 563 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
483 | 564 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
484 | 565 | |
|
485 | 566 | for perm in user_perms: |
|
486 | 567 | perm_name = perm.permission.permission_name.replace( |
|
487 | 568 | 'group.', 'repository.') |
|
488 | 569 | perm_obj = Permission.get_by_key(perm_name) |
|
489 | 570 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
490 | 571 | |
|
491 | 572 | for perm in group_perms: |
|
492 | 573 | perm_name = perm.permission.permission_name.replace( |
|
493 | 574 | 'group.', 'repository.') |
|
494 | 575 | perm_obj = Permission.get_by_key(perm_name) |
|
495 | 576 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) |
|
496 | 577 | |
|
497 | 578 | if private: |
|
498 | 579 | RepoModel(self.sa).grant_user_permission( |
|
499 | 580 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
500 | 581 | |
|
501 | 582 | else: |
|
502 | 583 | perm_obj = self._create_default_perms(new_repo, private) |
|
503 | 584 | self.sa.add(perm_obj) |
|
504 | 585 | |
|
505 | 586 | # now automatically start following this repository as owner |
|
506 | 587 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id) |
|
507 | 588 | |
|
508 | 589 | # we need to flush here, in order to check if database won't |
|
509 | 590 | # throw any exceptions, create filesystem dirs at the very end |
|
510 | 591 | self.sa.flush() |
|
511 | 592 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
512 | 593 | return new_repo |
|
513 | 594 | |
|
514 | 595 | except Exception: |
|
515 | 596 | log.error(traceback.format_exc()) |
|
516 | 597 | raise |
|
517 | 598 | |
|
518 | 599 | def create(self, form_data, cur_user): |
|
519 | 600 | """ |
|
520 | 601 | Create repository using celery tasks |
|
521 | 602 | |
|
522 | 603 | :param form_data: |
|
523 | 604 | :param cur_user: |
|
524 | 605 | """ |
|
525 | 606 | from rhodecode.lib.celerylib import tasks, run_task |
|
526 | 607 | return run_task(tasks.create_repo, form_data, cur_user) |
|
527 | 608 | |
|
528 | 609 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
529 | 610 | perm_deletions=None, check_perms=True, |
|
530 | 611 | cur_user=None): |
|
531 | 612 | if not perm_additions: |
|
532 | 613 | perm_additions = [] |
|
533 | 614 | if not perm_updates: |
|
534 | 615 | perm_updates = [] |
|
535 | 616 | if not perm_deletions: |
|
536 | 617 | perm_deletions = [] |
|
537 | 618 | |
|
538 | 619 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
539 | 620 | |
|
540 | 621 | changes = { |
|
541 | 622 | 'added': [], |
|
542 | 623 | 'updated': [], |
|
543 | 624 | 'deleted': [] |
|
544 | 625 | } |
|
545 | 626 | # update permissions |
|
546 | 627 | for member_id, perm, member_type in perm_updates: |
|
547 | 628 | member_id = int(member_id) |
|
548 | 629 | if member_type == 'user': |
|
549 | 630 | member_name = User.get(member_id).username |
|
550 | 631 | # this updates also current one if found |
|
551 | 632 | self.grant_user_permission( |
|
552 | 633 | repo=repo, user=member_id, perm=perm) |
|
553 | 634 | elif member_type == 'user_group': |
|
554 | 635 | # check if we have permissions to alter this usergroup |
|
555 | 636 | member_name = UserGroup.get(member_id).users_group_name |
|
556 | 637 | if not check_perms or HasUserGroupPermissionAny( |
|
557 | 638 | *req_perms)(member_name, user=cur_user): |
|
558 | 639 | self.grant_user_group_permission( |
|
559 | 640 | repo=repo, group_name=member_id, perm=perm) |
|
560 | 641 | else: |
|
561 | 642 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
562 | 643 | "got {} instead".format(member_type)) |
|
563 | 644 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
564 | 645 | 'name': member_name, 'new_perm': perm}) |
|
565 | 646 | |
|
566 | 647 | # set new permissions |
|
567 | 648 | for member_id, perm, member_type in perm_additions: |
|
568 | 649 | member_id = int(member_id) |
|
569 | 650 | if member_type == 'user': |
|
570 | 651 | member_name = User.get(member_id).username |
|
571 | 652 | self.grant_user_permission( |
|
572 | 653 | repo=repo, user=member_id, perm=perm) |
|
573 | 654 | elif member_type == 'user_group': |
|
574 | 655 | # check if we have permissions to alter this usergroup |
|
575 | 656 | member_name = UserGroup.get(member_id).users_group_name |
|
576 | 657 | if not check_perms or HasUserGroupPermissionAny( |
|
577 | 658 | *req_perms)(member_name, user=cur_user): |
|
578 | 659 | self.grant_user_group_permission( |
|
579 | 660 | repo=repo, group_name=member_id, perm=perm) |
|
580 | 661 | else: |
|
581 | 662 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
582 | 663 | "got {} instead".format(member_type)) |
|
583 | 664 | |
|
584 | 665 | changes['added'].append({'type': member_type, 'id': member_id, |
|
585 | 666 | 'name': member_name, 'new_perm': perm}) |
|
586 | 667 | # delete permissions |
|
587 | 668 | for member_id, perm, member_type in perm_deletions: |
|
588 | 669 | member_id = int(member_id) |
|
589 | 670 | if member_type == 'user': |
|
590 | 671 | member_name = User.get(member_id).username |
|
591 | 672 | self.revoke_user_permission(repo=repo, user=member_id) |
|
592 | 673 | elif member_type == 'user_group': |
|
593 | 674 | # check if we have permissions to alter this usergroup |
|
594 | 675 | member_name = UserGroup.get(member_id).users_group_name |
|
595 | 676 | if not check_perms or HasUserGroupPermissionAny( |
|
596 | 677 | *req_perms)(member_name, user=cur_user): |
|
597 | 678 | self.revoke_user_group_permission( |
|
598 | 679 | repo=repo, group_name=member_id) |
|
599 | 680 | else: |
|
600 | 681 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
601 | 682 | "got {} instead".format(member_type)) |
|
602 | 683 | |
|
603 | 684 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
604 | 685 | 'name': member_name, 'new_perm': perm}) |
|
605 | 686 | return changes |
|
606 | 687 | |
|
607 | 688 | def create_fork(self, form_data, cur_user): |
|
608 | 689 | """ |
|
609 | 690 | Simple wrapper into executing celery task for fork creation |
|
610 | 691 | |
|
611 | 692 | :param form_data: |
|
612 | 693 | :param cur_user: |
|
613 | 694 | """ |
|
614 | 695 | from rhodecode.lib.celerylib import tasks, run_task |
|
615 | 696 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
616 | 697 | |
|
617 | 698 | def archive(self, repo): |
|
618 | 699 | """ |
|
619 | 700 | Archive given repository. Set archive flag. |
|
620 | 701 | |
|
621 | 702 | :param repo: |
|
622 | 703 | """ |
|
623 | 704 | repo = self._get_repo(repo) |
|
624 | 705 | if repo: |
|
625 | 706 | |
|
626 | 707 | try: |
|
627 | 708 | repo.archived = True |
|
628 | 709 | self.sa.add(repo) |
|
629 | 710 | self.sa.commit() |
|
630 | 711 | except Exception: |
|
631 | 712 | log.error(traceback.format_exc()) |
|
632 | 713 | raise |
|
633 | 714 | |
|
    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the deletion; defaults to the
            currently logged-in rhodecode user's username
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                # keep the fork repositories but break their link to the
                # repository being deleted
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursively delete every fork (and forks of forks)
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                # forks exist but no strategy was chosen -- refuse to delete
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            # snapshot repo data before deletion for the audit log below
            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
|
682 | 763 | |
|
683 | 764 | def grant_user_permission(self, repo, user, perm): |
|
684 | 765 | """ |
|
685 | 766 | Grant permission for user on given repository, or update existing one |
|
686 | 767 | if found |
|
687 | 768 | |
|
688 | 769 | :param repo: Instance of Repository, repository_id, or repository name |
|
689 | 770 | :param user: Instance of User, user_id or username |
|
690 | 771 | :param perm: Instance of Permission, or permission_name |
|
691 | 772 | """ |
|
692 | 773 | user = self._get_user(user) |
|
693 | 774 | repo = self._get_repo(repo) |
|
694 | 775 | permission = self._get_perm(perm) |
|
695 | 776 | |
|
696 | 777 | # check if we have that permission already |
|
697 | 778 | obj = self.sa.query(UserRepoToPerm) \ |
|
698 | 779 | .filter(UserRepoToPerm.user == user) \ |
|
699 | 780 | .filter(UserRepoToPerm.repository == repo) \ |
|
700 | 781 | .scalar() |
|
701 | 782 | if obj is None: |
|
702 | 783 | # create new ! |
|
703 | 784 | obj = UserRepoToPerm() |
|
704 | 785 | obj.repository = repo |
|
705 | 786 | obj.user = user |
|
706 | 787 | obj.permission = permission |
|
707 | 788 | self.sa.add(obj) |
|
708 | 789 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
709 | 790 | action_logger_generic( |
|
710 | 791 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
711 | 792 | perm, user, repo), namespace='security.repo') |
|
712 | 793 | return obj |
|
713 | 794 | |
|
714 | 795 | def revoke_user_permission(self, repo, user): |
|
715 | 796 | """ |
|
716 | 797 | Revoke permission for user on given repository |
|
717 | 798 | |
|
718 | 799 | :param repo: Instance of Repository, repository_id, or repository name |
|
719 | 800 | :param user: Instance of User, user_id or username |
|
720 | 801 | """ |
|
721 | 802 | |
|
722 | 803 | user = self._get_user(user) |
|
723 | 804 | repo = self._get_repo(repo) |
|
724 | 805 | |
|
725 | 806 | obj = self.sa.query(UserRepoToPerm) \ |
|
726 | 807 | .filter(UserRepoToPerm.repository == repo) \ |
|
727 | 808 | .filter(UserRepoToPerm.user == user) \ |
|
728 | 809 | .scalar() |
|
729 | 810 | if obj: |
|
730 | 811 | self.sa.delete(obj) |
|
731 | 812 | log.debug('Revoked perm on %s on %s', repo, user) |
|
732 | 813 | action_logger_generic( |
|
733 | 814 | 'revoked permission from user: {} on repo: {}'.format( |
|
734 | 815 | user, repo), namespace='security.repo') |
|
735 | 816 | |
|
736 | 817 | def grant_user_group_permission(self, repo, group_name, perm): |
|
737 | 818 | """ |
|
738 | 819 | Grant permission for user group on given repository, or update |
|
739 | 820 | existing one if found |
|
740 | 821 | |
|
741 | 822 | :param repo: Instance of Repository, repository_id, or repository name |
|
742 | 823 | :param group_name: Instance of UserGroup, users_group_id, |
|
743 | 824 | or user group name |
|
744 | 825 | :param perm: Instance of Permission, or permission_name |
|
745 | 826 | """ |
|
746 | 827 | repo = self._get_repo(repo) |
|
747 | 828 | group_name = self._get_user_group(group_name) |
|
748 | 829 | permission = self._get_perm(perm) |
|
749 | 830 | |
|
750 | 831 | # check if we have that permission already |
|
751 | 832 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
752 | 833 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
753 | 834 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
754 | 835 | .scalar() |
|
755 | 836 | |
|
756 | 837 | if obj is None: |
|
757 | 838 | # create new |
|
758 | 839 | obj = UserGroupRepoToPerm() |
|
759 | 840 | |
|
760 | 841 | obj.repository = repo |
|
761 | 842 | obj.users_group = group_name |
|
762 | 843 | obj.permission = permission |
|
763 | 844 | self.sa.add(obj) |
|
764 | 845 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
765 | 846 | action_logger_generic( |
|
766 | 847 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
767 | 848 | perm, group_name, repo), namespace='security.repo') |
|
768 | 849 | |
|
769 | 850 | return obj |
|
770 | 851 | |
|
771 | 852 | def revoke_user_group_permission(self, repo, group_name): |
|
772 | 853 | """ |
|
773 | 854 | Revoke permission for user group on given repository |
|
774 | 855 | |
|
775 | 856 | :param repo: Instance of Repository, repository_id, or repository name |
|
776 | 857 | :param group_name: Instance of UserGroup, users_group_id, |
|
777 | 858 | or user group name |
|
778 | 859 | """ |
|
779 | 860 | repo = self._get_repo(repo) |
|
780 | 861 | group_name = self._get_user_group(group_name) |
|
781 | 862 | |
|
782 | 863 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
783 | 864 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
784 | 865 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
785 | 866 | .scalar() |
|
786 | 867 | if obj: |
|
787 | 868 | self.sa.delete(obj) |
|
788 | 869 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
789 | 870 | action_logger_generic( |
|
790 | 871 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
791 | 872 | group_name, repo), namespace='security.repo') |
|
792 | 873 | |
|
793 | 874 | def delete_stats(self, repo_name): |
|
794 | 875 | """ |
|
795 | 876 | removes stats for given repo |
|
796 | 877 | |
|
797 | 878 | :param repo_name: |
|
798 | 879 | """ |
|
799 | 880 | repo = self._get_repo(repo_name) |
|
800 | 881 | try: |
|
801 | 882 | obj = self.sa.query(Statistics) \ |
|
802 | 883 | .filter(Statistics.repository == repo).scalar() |
|
803 | 884 | if obj: |
|
804 | 885 | self.sa.delete(obj) |
|
805 | 886 | except Exception: |
|
806 | 887 | log.error(traceback.format_exc()) |
|
807 | 888 | raise |
|
808 | 889 | |
|
809 | 890 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
810 | 891 | field_type='str', field_desc=''): |
|
811 | 892 | |
|
812 | 893 | repo = self._get_repo(repo_name) |
|
813 | 894 | |
|
814 | 895 | new_field = RepositoryField() |
|
815 | 896 | new_field.repository = repo |
|
816 | 897 | new_field.field_key = field_key |
|
817 | 898 | new_field.field_type = field_type # python type |
|
818 | 899 | new_field.field_value = field_value |
|
819 | 900 | new_field.field_desc = field_desc |
|
820 | 901 | new_field.field_label = field_label |
|
821 | 902 | self.sa.add(new_field) |
|
822 | 903 | return new_field |
|
823 | 904 | |
|
824 | 905 | def delete_repo_field(self, repo_name, field_key): |
|
825 | 906 | repo = self._get_repo(repo_name) |
|
826 | 907 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
827 | 908 | if field: |
|
828 | 909 | self.sa.delete(field) |
|
829 | 910 | |
|
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repository name, must not contain group path
        :param repo_type: vcs backend alias used to pick the backend class
        :param repo_group: RepoGroup instance or group path string (or falsy)
        :param clone_uri: optional source url to clone/import from
        :param repo_store_location: optional explicit filesystem path that
            overrides the computed group-aware location
        :param use_global_config: when True, use the global vcs config
            instead of a per-repository one
        :param install_hooks: install rhodecode hooks into the new repository
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        # NOTE(review): ScmModel import appears unused in this method -- confirm
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        # resolve the parent directory path from the group argument
        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # build the per-repo config key (group-prefixed) unless the caller
        # explicitly asked for the global config
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
|
904 | 985 | |
|
905 | 986 | def _rename_filesystem_repo(self, old, new): |
|
906 | 987 | """ |
|
907 | 988 | renames repository on filesystem |
|
908 | 989 | |
|
909 | 990 | :param old: old name |
|
910 | 991 | :param new: new name |
|
911 | 992 | """ |
|
912 | 993 | log.info('renaming repo from %s to %s', old, new) |
|
913 | 994 | |
|
914 | 995 | old_path = os.path.join(self.repos_path, old) |
|
915 | 996 | new_path = os.path.join(self.repos_path, new) |
|
916 | 997 | if os.path.isdir(new_path): |
|
917 | 998 | raise Exception( |
|
918 | 999 | 'Was trying to rename to already existing dir %s' % new_path |
|
919 | 1000 | ) |
|
920 | 1001 | shutil.move(old_path, new_path) |
|
921 | 1002 | |
|
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem, the removal is actually made by
        added rm__ prefix into dir, and rename internal .hg/.git dirs so this
        repository is no longer valid for rhodecode, can be undeleted later on
        by reverting the renames on this repository

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo by renaming its control dir (.hg/.git)
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # build a unique, timestamped archive name for the removed repo
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
|
964 | 1045 | |
|
965 | 1046 | |
|
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower extension priority sorts first; None covers extension-less readmes
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # fix: '.mkdn' was listed without its leading dot and could
        # therefore never match an extension (extensions always include
        # the dot, as in `default_priorities` above)
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`.

        Files directly under `path` are considered first; when none matches,
        doc-like sub-directories are searched recursively in priority order.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        # renamed loop variable so it no longer shadows the `path` argument
        for sub_path in paths:
            match = self.search(commit, path=sub_path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file whose basename looks like a readme
        for node in nodes:
            if not node.is_file():
                continue
            basename = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(basename)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield the paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """Return (renderer_priority, extension_priority); lower sorts first."""
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
|
1065 | 1146 | |
|
1066 | 1147 | |
|
class ReadmeMatch:
    """A readme candidate: the file node, its regex match and sort priority."""

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        """Path of the matched readme node."""
        return self.node.path

    def __repr__(self):
        # fix: the closing '>' was missing from the repr format string
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,799 +1,877 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | repo group model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import datetime |
|
28 | 28 | import itertools |
|
29 | 29 | import logging |
|
30 | 30 | import shutil |
|
31 | 31 | import time |
|
32 | 32 | import traceback |
|
33 | 33 | import string |
|
34 | 34 | |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | |
|
37 | 37 | from rhodecode import events |
|
38 | 38 | from rhodecode.model import BaseModel |
|
39 | 39 | from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator, |
|
40 | 40 | Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm, |
|
41 | 41 | UserGroup, Repository) |
|
42 | 42 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
43 | 43 | from rhodecode.lib.caching_query import FromCache |
|
44 | 44 | from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class RepoGroupModel(BaseModel): |
|
50 | 50 | |
|
51 | 51 | cls = RepoGroup |
|
52 | 52 | PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`' |
|
53 | 53 | PERSONAL_GROUP_PATTERN = '${username}' # default |
|
54 | 54 | |
|
55 | 55 | def _get_user_group(self, users_group): |
|
56 | 56 | return self._get_instance(UserGroup, users_group, |
|
57 | 57 | callback=UserGroup.get_by_group_name) |
|
58 | 58 | |
|
59 | 59 | def _get_repo_group(self, repo_group): |
|
60 | 60 | return self._get_instance(RepoGroup, repo_group, |
|
61 | 61 | callback=RepoGroup.get_by_group_name) |
|
62 | 62 | |
|
63 | 63 | @LazyProperty |
|
64 | 64 | def repos_path(self): |
|
65 | 65 | """ |
|
66 | 66 | Gets the repositories root path from database |
|
67 | 67 | """ |
|
68 | 68 | |
|
69 | 69 | settings_model = VcsSettingsModel(sa=self.sa) |
|
70 | 70 | return settings_model.get_repos_location() |
|
71 | 71 | |
|
72 | 72 | def get_by_group_name(self, repo_group_name, cache=None): |
|
73 | 73 | repo = self.sa.query(RepoGroup) \ |
|
74 | 74 | .filter(RepoGroup.group_name == repo_group_name) |
|
75 | 75 | |
|
76 | 76 | if cache: |
|
77 | 77 | name_key = _hash_key(repo_group_name) |
|
78 | 78 | repo = repo.options( |
|
79 | 79 | FromCache("sql_cache_short", "get_repo_group_%s" % name_key)) |
|
80 | 80 | return repo.scalar() |
|
81 | 81 | |
|
82 | 82 | def get_default_create_personal_repo_group(self): |
|
83 | 83 | value = SettingsModel().get_setting_by_name( |
|
84 | 84 | 'create_personal_repo_group') |
|
85 | 85 | return value.app_settings_value if value else None or False |
|
86 | 86 | |
|
87 | 87 | def get_personal_group_name_pattern(self): |
|
88 | 88 | value = SettingsModel().get_setting_by_name( |
|
89 | 89 | 'personal_repo_group_pattern') |
|
90 | 90 | val = value.app_settings_value if value else None |
|
91 | 91 | group_template = val or self.PERSONAL_GROUP_PATTERN |
|
92 | 92 | |
|
93 | 93 | group_template = group_template.lstrip('/') |
|
94 | 94 | return group_template |
|
95 | 95 | |
|
96 | 96 | def get_personal_group_name(self, user): |
|
97 | 97 | template = self.get_personal_group_name_pattern() |
|
98 | 98 | return string.Template(template).safe_substitute( |
|
99 | 99 | username=user.username, |
|
100 | 100 | user_id=user.user_id, |
|
101 | 101 | first_name=user.first_name, |
|
102 | 102 | last_name=user.last_name, |
|
103 | 103 | ) |
|
104 | 104 | |
|
105 | 105 | def create_personal_repo_group(self, user, commit_early=True): |
|
106 | 106 | desc = self.PERSONAL_GROUP_DESC % {'username': user.username} |
|
107 | 107 | personal_repo_group_name = self.get_personal_group_name(user) |
|
108 | 108 | |
|
109 | 109 | # create a new one |
|
110 | 110 | RepoGroupModel().create( |
|
111 | 111 | group_name=personal_repo_group_name, |
|
112 | 112 | group_description=desc, |
|
113 | 113 | owner=user.username, |
|
114 | 114 | personal=True, |
|
115 | 115 | commit_early=commit_early) |
|
116 | 116 | |
|
117 | 117 | def _create_default_perms(self, new_group): |
|
118 | 118 | # create default permission |
|
119 | 119 | default_perm = 'group.read' |
|
120 | 120 | def_user = User.get_default_user() |
|
121 | 121 | for p in def_user.user_perms: |
|
122 | 122 | if p.permission.permission_name.startswith('group.'): |
|
123 | 123 | default_perm = p.permission.permission_name |
|
124 | 124 | break |
|
125 | 125 | |
|
126 | 126 | repo_group_to_perm = UserRepoGroupToPerm() |
|
127 | 127 | repo_group_to_perm.permission = Permission.get_by_key(default_perm) |
|
128 | 128 | |
|
129 | 129 | repo_group_to_perm.group = new_group |
|
130 | 130 | repo_group_to_perm.user_id = def_user.user_id |
|
131 | 131 | return repo_group_to_perm |
|
132 | 132 | |
|
133 | 133 | def _get_group_name_and_parent(self, group_name_full, repo_in_path=False, |
|
134 | 134 | get_object=False): |
|
135 | 135 | """ |
|
136 | 136 | Get's the group name and a parent group name from given group name. |
|
137 | 137 | If repo_in_path is set to truth, we asume the full path also includes |
|
138 | 138 | repo name, in such case we clean the last element. |
|
139 | 139 | |
|
140 | 140 | :param group_name_full: |
|
141 | 141 | """ |
|
142 | 142 | split_paths = 1 |
|
143 | 143 | if repo_in_path: |
|
144 | 144 | split_paths = 2 |
|
145 | 145 | _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths) |
|
146 | 146 | |
|
147 | 147 | if repo_in_path and len(_parts) > 1: |
|
148 | 148 | # such case last element is the repo_name |
|
149 | 149 | _parts.pop(-1) |
|
150 | 150 | group_name_cleaned = _parts[-1] # just the group name |
|
151 | 151 | parent_repo_group_name = None |
|
152 | 152 | |
|
153 | 153 | if len(_parts) > 1: |
|
154 | 154 | parent_repo_group_name = _parts[0] |
|
155 | 155 | |
|
156 | 156 | parent_group = None |
|
157 | 157 | if parent_repo_group_name: |
|
158 | 158 | parent_group = RepoGroup.get_by_group_name(parent_repo_group_name) |
|
159 | 159 | |
|
160 | 160 | if get_object: |
|
161 | 161 | return group_name_cleaned, parent_repo_group_name, parent_group |
|
162 | 162 | |
|
163 | 163 | return group_name_cleaned, parent_repo_group_name |
|
164 | 164 | |
|
165 | 165 | def check_exist_filesystem(self, group_name, exc_on_failure=True): |
|
166 | 166 | create_path = os.path.join(self.repos_path, group_name) |
|
167 | 167 | log.debug('creating new group in %s', create_path) |
|
168 | 168 | |
|
169 | 169 | if os.path.isdir(create_path): |
|
170 | 170 | if exc_on_failure: |
|
171 | 171 | abs_create_path = os.path.abspath(create_path) |
|
172 | 172 | raise Exception('Directory `{}` already exists !'.format(abs_create_path)) |
|
173 | 173 | return False |
|
174 | 174 | return True |
|
175 | 175 | |
|
176 | 176 | def _create_group(self, group_name): |
|
177 | 177 | """ |
|
178 | 178 | makes repository group on filesystem |
|
179 | 179 | |
|
180 | 180 | :param repo_name: |
|
181 | 181 | :param parent_id: |
|
182 | 182 | """ |
|
183 | 183 | |
|
184 | 184 | self.check_exist_filesystem(group_name) |
|
185 | 185 | create_path = os.path.join(self.repos_path, group_name) |
|
186 | 186 | log.debug('creating new group in %s', create_path) |
|
187 | 187 | os.makedirs(create_path, mode=0o755) |
|
188 | 188 | log.debug('created group in %s', create_path) |
|
189 | 189 | |
|
190 | 190 | def _rename_group(self, old, new): |
|
191 | 191 | """ |
|
192 | 192 | Renames a group on filesystem |
|
193 | 193 | |
|
194 | 194 | :param group_name: |
|
195 | 195 | """ |
|
196 | 196 | |
|
197 | 197 | if old == new: |
|
198 | 198 | log.debug('skipping group rename') |
|
199 | 199 | return |
|
200 | 200 | |
|
201 | 201 | log.debug('renaming repository group from %s to %s', old, new) |
|
202 | 202 | |
|
203 | 203 | old_path = os.path.join(self.repos_path, old) |
|
204 | 204 | new_path = os.path.join(self.repos_path, new) |
|
205 | 205 | |
|
206 | 206 | log.debug('renaming repos paths from %s to %s', old_path, new_path) |
|
207 | 207 | |
|
208 | 208 | if os.path.isdir(new_path): |
|
209 | 209 | raise Exception('Was trying to rename to already ' |
|
210 | 210 | 'existing dir %s' % new_path) |
|
211 | 211 | shutil.move(old_path, new_path) |
|
212 | 212 | |
|
213 | 213 | def _delete_filesystem_group(self, group, force_delete=False): |
|
214 | 214 | """ |
|
215 | 215 | Deletes a group from a filesystem |
|
216 | 216 | |
|
217 | 217 | :param group: instance of group from database |
|
218 | 218 | :param force_delete: use shutil rmtree to remove all objects |
|
219 | 219 | """ |
|
220 | 220 | paths = group.full_path.split(RepoGroup.url_sep()) |
|
221 | 221 | paths = os.sep.join(paths) |
|
222 | 222 | |
|
223 | 223 | rm_path = os.path.join(self.repos_path, paths) |
|
224 | 224 | log.info("Removing group %s", rm_path) |
|
225 | 225 | # delete only if that path really exists |
|
226 | 226 | if os.path.isdir(rm_path): |
|
227 | 227 | if force_delete: |
|
228 | 228 | shutil.rmtree(rm_path) |
|
229 | 229 | else: |
|
230 | 230 | # archive that group
|
231 | 231 | _now = datetime.datetime.now() |
|
232 | 232 | _ms = str(_now.microsecond).rjust(6, '0') |
|
233 | 233 | _d = 'rm__%s_GROUP_%s' % ( |
|
234 | 234 | _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name) |
|
235 | 235 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
236 | 236 | |
|
237 | 237 | def create(self, group_name, group_description, owner, just_db=False, |
|
238 | 238 | copy_permissions=False, personal=None, commit_early=True): |
|
239 | 239 | |
|
240 | 240 | (group_name_cleaned, |
|
241 | 241 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name) |
|
242 | 242 | |
|
243 | 243 | parent_group = None |
|
244 | 244 | if parent_group_name: |
|
245 | 245 | parent_group = self._get_repo_group(parent_group_name) |
|
246 | 246 | if not parent_group: |
|
247 | 247 | # we tried to create a nested group, but the parent is not |
|
248 | 248 | # existing |
|
249 | 249 | raise ValueError( |
|
250 | 250 | 'Parent group `%s` given in `%s` group name ' |
|
251 | 251 | 'is not yet existing.' % (parent_group_name, group_name)) |
|
252 | 252 | |
|
253 | 253 | # because we are doing a cleanup, we need to check if such directory |
|
254 | 254 | # already exists. If we don't do that we can accidentally delete |
|
255 | 255 | # existing directory via cleanup that can cause data issues, since |
|
256 | 256 | # delete does a folder rename to special syntax later cleanup |
|
257 | 257 | # functions can delete this |
|
258 | 258 | cleanup_group = self.check_exist_filesystem(group_name, |
|
259 | 259 | exc_on_failure=False) |
|
260 | 260 | user = self._get_user(owner) |
|
261 | 261 | if not user: |
|
262 | 262 | raise ValueError('Owner %s not found as rhodecode user', owner) |
|
263 | 263 | |
|
264 | 264 | try: |
|
265 | 265 | new_repo_group = RepoGroup() |
|
266 | 266 | new_repo_group.user = user |
|
267 | 267 | new_repo_group.group_description = group_description or group_name |
|
268 | 268 | new_repo_group.parent_group = parent_group |
|
269 | 269 | new_repo_group.group_name = group_name |
|
270 | 270 | new_repo_group.personal = personal |
|
271 | 271 | |
|
272 | 272 | self.sa.add(new_repo_group) |
|
273 | 273 | |
|
274 | 274 | # create an ADMIN permission for owner except if we're super admin, |
|
275 | 275 | # later owner should go into the owner field of groups |
|
276 | 276 | if not user.is_admin: |
|
277 | 277 | self.grant_user_permission(repo_group=new_repo_group, |
|
278 | 278 | user=owner, perm='group.admin') |
|
279 | 279 | |
|
280 | 280 | if parent_group and copy_permissions: |
|
281 | 281 | # copy permissions from parent |
|
282 | 282 | user_perms = UserRepoGroupToPerm.query() \ |
|
283 | 283 | .filter(UserRepoGroupToPerm.group == parent_group).all() |
|
284 | 284 | |
|
285 | 285 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
286 | 286 | .filter(UserGroupRepoGroupToPerm.group == parent_group).all() |
|
287 | 287 | |
|
288 | 288 | for perm in user_perms: |
|
289 | 289 | # don't copy over the permission for user who is creating |
|
290 | 290 | # this group, if he is not super admin he gets admin
|
291 | 291 | # permission set above |
|
292 | 292 | if perm.user != user or user.is_admin: |
|
293 | 293 | UserRepoGroupToPerm.create( |
|
294 | 294 | perm.user, new_repo_group, perm.permission) |
|
295 | 295 | |
|
296 | 296 | for perm in group_perms: |
|
297 | 297 | UserGroupRepoGroupToPerm.create( |
|
298 | 298 | perm.users_group, new_repo_group, perm.permission) |
|
299 | 299 | else: |
|
300 | 300 | perm_obj = self._create_default_perms(new_repo_group) |
|
301 | 301 | self.sa.add(perm_obj) |
|
302 | 302 | |
|
303 | 303 | # now commit the changes, earlier so we are sure everything is in |
|
304 | 304 | # the database. |
|
305 | 305 | if commit_early: |
|
306 | 306 | self.sa.commit() |
|
307 | 307 | if not just_db: |
|
308 | 308 | self._create_group(new_repo_group.group_name) |
|
309 | 309 | |
|
310 | 310 | # trigger the post hook |
|
311 | 311 | from rhodecode.lib.hooks_base import log_create_repository_group |
|
312 | 312 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
313 | 313 | |
|
314 | 314 | # update repo group commit caches initially |
|
315 | 315 | repo_group.update_commit_cache() |
|
316 | 316 | |
|
317 | 317 | log_create_repository_group( |
|
318 | 318 | created_by=user.username, **repo_group.get_dict()) |
|
319 | 319 | |
|
320 | 320 | # Trigger create event. |
|
321 | 321 | events.trigger(events.RepoGroupCreateEvent(repo_group)) |
|
322 | 322 | |
|
323 | 323 | return new_repo_group |
|
324 | 324 | except Exception: |
|
325 | 325 | self.sa.rollback() |
|
326 | 326 | log.exception('Exception occurred when creating repository group, ' |
|
327 | 327 | 'doing cleanup...') |
|
328 | 328 | # rollback things manually ! |
|
329 | 329 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
330 | 330 | if repo_group: |
|
331 | 331 | RepoGroup.delete(repo_group.group_id) |
|
332 | 332 | self.sa.commit() |
|
333 | 333 | if cleanup_group: |
|
334 | 334 | RepoGroupModel()._delete_filesystem_group(repo_group) |
|
335 | 335 | raise |
|
336 | 336 | |
|
337 | 337 | def update_permissions( |
|
338 | 338 | self, repo_group, perm_additions=None, perm_updates=None, |
|
339 | 339 | perm_deletions=None, recursive=None, check_perms=True, |
|
340 | 340 | cur_user=None): |
|
341 | 341 | from rhodecode.model.repo import RepoModel |
|
342 | 342 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
343 | 343 | |
|
344 | 344 | if not perm_additions: |
|
345 | 345 | perm_additions = [] |
|
346 | 346 | if not perm_updates: |
|
347 | 347 | perm_updates = [] |
|
348 | 348 | if not perm_deletions: |
|
349 | 349 | perm_deletions = [] |
|
350 | 350 | |
|
351 | 351 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
352 | 352 | |
|
353 | 353 | changes = { |
|
354 | 354 | 'added': [], |
|
355 | 355 | 'updated': [], |
|
356 | 356 | 'deleted': [] |
|
357 | 357 | } |
|
358 | 358 | |
|
359 | 359 | def _set_perm_user(obj, user, perm): |
|
360 | 360 | if isinstance(obj, RepoGroup): |
|
361 | 361 | self.grant_user_permission( |
|
362 | 362 | repo_group=obj, user=user, perm=perm) |
|
363 | 363 | elif isinstance(obj, Repository): |
|
364 | 364 | # private repos will not allow to change the default |
|
365 | 365 | # permissions using recursive mode |
|
366 | 366 | if obj.private and user == User.DEFAULT_USER: |
|
367 | 367 | return |
|
368 | 368 | |
|
369 | 369 | # we set group permission but we have to switch to repo |
|
370 | 370 | # permission |
|
371 | 371 | perm = perm.replace('group.', 'repository.') |
|
372 | 372 | RepoModel().grant_user_permission( |
|
373 | 373 | repo=obj, user=user, perm=perm) |
|
374 | 374 | |
|
375 | 375 | def _set_perm_group(obj, users_group, perm): |
|
376 | 376 | if isinstance(obj, RepoGroup): |
|
377 | 377 | self.grant_user_group_permission( |
|
378 | 378 | repo_group=obj, group_name=users_group, perm=perm) |
|
379 | 379 | elif isinstance(obj, Repository): |
|
380 | 380 | # we set group permission but we have to switch to repo |
|
381 | 381 | # permission |
|
382 | 382 | perm = perm.replace('group.', 'repository.') |
|
383 | 383 | RepoModel().grant_user_group_permission( |
|
384 | 384 | repo=obj, group_name=users_group, perm=perm) |
|
385 | 385 | |
|
386 | 386 | def _revoke_perm_user(obj, user): |
|
387 | 387 | if isinstance(obj, RepoGroup): |
|
388 | 388 | self.revoke_user_permission(repo_group=obj, user=user) |
|
389 | 389 | elif isinstance(obj, Repository): |
|
390 | 390 | RepoModel().revoke_user_permission(repo=obj, user=user) |
|
391 | 391 | |
|
392 | 392 | def _revoke_perm_group(obj, user_group): |
|
393 | 393 | if isinstance(obj, RepoGroup): |
|
394 | 394 | self.revoke_user_group_permission( |
|
395 | 395 | repo_group=obj, group_name=user_group) |
|
396 | 396 | elif isinstance(obj, Repository): |
|
397 | 397 | RepoModel().revoke_user_group_permission( |
|
398 | 398 | repo=obj, group_name=user_group) |
|
399 | 399 | |
|
400 | 400 | # start updates |
|
401 | 401 | log.debug('Now updating permissions for %s in recursive mode:%s', |
|
402 | 402 | repo_group, recursive) |
|
403 | 403 | |
|
404 | 404 | # initialize check function, we'll call that multiple times |
|
405 | 405 | has_group_perm = HasUserGroupPermissionAny(*req_perms) |
|
406 | 406 | |
|
407 | 407 | for obj in repo_group.recursive_groups_and_repos(): |
|
408 | 408 | # iterated obj is an instance of a repos group or repository in |
|
409 | 409 | # that group, recursive option can be: none, repos, groups, all |
|
410 | 410 | if recursive == 'all': |
|
411 | 411 | obj = obj |
|
412 | 412 | elif recursive == 'repos': |
|
413 | 413 | # skip groups, other than this one |
|
414 | 414 | if isinstance(obj, RepoGroup) and not obj == repo_group: |
|
415 | 415 | continue |
|
416 | 416 | elif recursive == 'groups': |
|
417 | 417 | # skip repos |
|
418 | 418 | if isinstance(obj, Repository): |
|
419 | 419 | continue |
|
420 | 420 | else: # recursive == 'none': |
|
421 | 421 | # DEFAULT option - don't apply to iterated objects |
|
422 | 422 | # also we do a break at the end of this loop. if we are not |
|
423 | 423 | # in recursive mode |
|
424 | 424 | obj = repo_group |
|
425 | 425 | |
|
426 | 426 | change_obj = obj.get_api_data() |
|
427 | 427 | |
|
428 | 428 | # update permissions |
|
429 | 429 | for member_id, perm, member_type in perm_updates: |
|
430 | 430 | member_id = int(member_id) |
|
431 | 431 | if member_type == 'user': |
|
432 | 432 | member_name = User.get(member_id).username |
|
433 | 433 | # this updates also current one if found |
|
434 | 434 | _set_perm_user(obj, user=member_id, perm=perm) |
|
435 | 435 | elif member_type == 'user_group': |
|
436 | 436 | member_name = UserGroup.get(member_id).users_group_name |
|
437 | 437 | if not check_perms or has_group_perm(member_name, |
|
438 | 438 | user=cur_user): |
|
439 | 439 | _set_perm_group(obj, users_group=member_id, perm=perm) |
|
440 | 440 | else: |
|
441 | 441 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
442 | 442 | "got {} instead".format(member_type)) |
|
443 | 443 | |
|
444 | 444 | changes['updated'].append( |
|
445 | 445 | {'change_obj': change_obj, 'type': member_type, |
|
446 | 446 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
447 | 447 | |
|
448 | 448 | # set new permissions |
|
449 | 449 | for member_id, perm, member_type in perm_additions: |
|
450 | 450 | member_id = int(member_id) |
|
451 | 451 | if member_type == 'user': |
|
452 | 452 | member_name = User.get(member_id).username |
|
453 | 453 | _set_perm_user(obj, user=member_id, perm=perm) |
|
454 | 454 | elif member_type == 'user_group': |
|
455 | 455 | # check if we have permissions to alter this usergroup |
|
456 | 456 | member_name = UserGroup.get(member_id).users_group_name |
|
457 | 457 | if not check_perms or has_group_perm(member_name, |
|
458 | 458 | user=cur_user): |
|
459 | 459 | _set_perm_group(obj, users_group=member_id, perm=perm) |
|
460 | 460 | else: |
|
461 | 461 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
462 | 462 | "got {} instead".format(member_type)) |
|
463 | 463 | |
|
464 | 464 | changes['added'].append( |
|
465 | 465 | {'change_obj': change_obj, 'type': member_type, |
|
466 | 466 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
467 | 467 | |
|
468 | 468 | # delete permissions |
|
469 | 469 | for member_id, perm, member_type in perm_deletions: |
|
470 | 470 | member_id = int(member_id) |
|
471 | 471 | if member_type == 'user': |
|
472 | 472 | member_name = User.get(member_id).username |
|
473 | 473 | _revoke_perm_user(obj, user=member_id) |
|
474 | 474 | elif member_type == 'user_group': |
|
475 | 475 | # check if we have permissions to alter this usergroup |
|
476 | 476 | member_name = UserGroup.get(member_id).users_group_name |
|
477 | 477 | if not check_perms or has_group_perm(member_name, |
|
478 | 478 | user=cur_user): |
|
479 | 479 | _revoke_perm_group(obj, user_group=member_id) |
|
480 | 480 | else: |
|
481 | 481 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
482 | 482 | "got {} instead".format(member_type)) |
|
483 | 483 | |
|
484 | 484 | changes['deleted'].append( |
|
485 | 485 | {'change_obj': change_obj, 'type': member_type, |
|
486 | 486 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
487 | 487 | |
|
488 | 488 | # if it's not recursive call for all,repos,groups |
|
489 | 489 | # break the loop and don't proceed with other changes |
|
490 | 490 | if recursive not in ['all', 'repos', 'groups']: |
|
491 | 491 | break |
|
492 | 492 | |
|
493 | 493 | return changes |
|
494 | 494 | |
|
495 | 495 | def update(self, repo_group, form_data): |
|
496 | 496 | try: |
|
497 | 497 | repo_group = self._get_repo_group(repo_group) |
|
498 | 498 | old_path = repo_group.full_path |
|
499 | 499 | |
|
500 | 500 | # change properties |
|
501 | 501 | if 'group_description' in form_data: |
|
502 | 502 | repo_group.group_description = form_data['group_description'] |
|
503 | 503 | |
|
504 | 504 | if 'enable_locking' in form_data: |
|
505 | 505 | repo_group.enable_locking = form_data['enable_locking'] |
|
506 | 506 | |
|
507 | 507 | if 'group_parent_id' in form_data: |
|
508 | 508 | parent_group = ( |
|
509 | 509 | self._get_repo_group(form_data['group_parent_id'])) |
|
510 | 510 | repo_group.group_parent_id = ( |
|
511 | 511 | parent_group.group_id if parent_group else None) |
|
512 | 512 | repo_group.parent_group = parent_group |
|
513 | 513 | |
|
514 | 514 | # mikhail: to update the full_path, we have to explicitly |
|
515 | 515 | # update group_name |
|
516 | 516 | group_name = form_data.get('group_name', repo_group.name) |
|
517 | 517 | repo_group.group_name = repo_group.get_new_name(group_name) |
|
518 | 518 | |
|
519 | 519 | new_path = repo_group.full_path |
|
520 | 520 | |
|
521 | 521 | if 'user' in form_data: |
|
522 | 522 | repo_group.user = User.get_by_username(form_data['user']) |
|
523 | 523 | |
|
524 | 524 | self.sa.add(repo_group) |
|
525 | 525 | |
|
526 | 526 | # iterate over all members of this groups and do fixes |
|
527 | 527 | # set locking if given |
|
528 | 528 | # if obj is a repoGroup also fix the name of the group according |
|
529 | 529 | # to the parent |
|
530 | 530 | # if obj is a Repo fix its name
|
531 | 531 | # this can be potentially heavy operation |
|
532 | 532 | for obj in repo_group.recursive_groups_and_repos(): |
|
533 | 533 | # set the value from its parent
|
534 | 534 | obj.enable_locking = repo_group.enable_locking |
|
535 | 535 | if isinstance(obj, RepoGroup): |
|
536 | 536 | new_name = obj.get_new_name(obj.name) |
|
537 | 537 | log.debug('Fixing group %s to new name %s', |
|
538 | 538 | obj.group_name, new_name) |
|
539 | 539 | obj.group_name = new_name |
|
540 | 540 | |
|
541 | 541 | elif isinstance(obj, Repository): |
|
542 | 542 | # we need to get all repositories from this new group and |
|
543 | 543 | # rename them accordingly to new group path |
|
544 | 544 | new_name = obj.get_new_name(obj.just_name) |
|
545 | 545 | log.debug('Fixing repo %s to new name %s', |
|
546 | 546 | obj.repo_name, new_name) |
|
547 | 547 | obj.repo_name = new_name |
|
548 | 548 | |
|
549 | 549 | self.sa.add(obj) |
|
550 | 550 | |
|
551 | 551 | self._rename_group(old_path, new_path) |
|
552 | 552 | |
|
553 | 553 | # Trigger update event. |
|
554 | 554 | events.trigger(events.RepoGroupUpdateEvent(repo_group)) |
|
555 | 555 | |
|
556 | 556 | return repo_group |
|
557 | 557 | except Exception: |
|
558 | 558 | log.error(traceback.format_exc()) |
|
559 | 559 | raise |
|
560 | 560 | |
|
561 | 561 | def delete(self, repo_group, force_delete=False, fs_remove=True): |
|
562 | 562 | repo_group = self._get_repo_group(repo_group) |
|
563 | 563 | if not repo_group: |
|
564 | 564 | return False |
|
565 | 565 | try: |
|
566 | 566 | self.sa.delete(repo_group) |
|
567 | 567 | if fs_remove: |
|
568 | 568 | self._delete_filesystem_group(repo_group, force_delete) |
|
569 | 569 | else: |
|
570 | 570 | log.debug('skipping removal from filesystem') |
|
571 | 571 | |
|
572 | 572 | # Trigger delete event. |
|
573 | 573 | events.trigger(events.RepoGroupDeleteEvent(repo_group)) |
|
574 | 574 | return True |
|
575 | 575 | |
|
576 | 576 | except Exception: |
|
577 | 577 | log.error('Error removing repo_group %s', repo_group) |
|
578 | 578 | raise |
|
579 | 579 | |
|
580 | 580 | def grant_user_permission(self, repo_group, user, perm): |
|
581 | 581 | """ |
|
582 | 582 | Grant permission for user on given repository group, or update |
|
583 | 583 | existing one if found |
|
584 | 584 | |
|
585 | 585 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
586 | 586 | or repositories_group name |
|
587 | 587 | :param user: Instance of User, user_id or username |
|
588 | 588 | :param perm: Instance of Permission, or permission_name |
|
589 | 589 | """ |
|
590 | 590 | |
|
591 | 591 | repo_group = self._get_repo_group(repo_group) |
|
592 | 592 | user = self._get_user(user) |
|
593 | 593 | permission = self._get_perm(perm) |
|
594 | 594 | |
|
595 | 595 | # check if we have that permission already |
|
596 | 596 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
597 | 597 | .filter(UserRepoGroupToPerm.user == user)\ |
|
598 | 598 | .filter(UserRepoGroupToPerm.group == repo_group)\ |
|
599 | 599 | .scalar() |
|
600 | 600 | if obj is None: |
|
601 | 601 | # create new ! |
|
602 | 602 | obj = UserRepoGroupToPerm() |
|
603 | 603 | obj.group = repo_group |
|
604 | 604 | obj.user = user |
|
605 | 605 | obj.permission = permission |
|
606 | 606 | self.sa.add(obj) |
|
607 | 607 | log.debug('Granted perm %s to %s on %s', perm, user, repo_group) |
|
608 | 608 | action_logger_generic( |
|
609 | 609 | 'granted permission: {} to user: {} on repogroup: {}'.format( |
|
610 | 610 | perm, user, repo_group), namespace='security.repogroup') |
|
611 | 611 | return obj |
|
612 | 612 | |
|
613 | 613 | def revoke_user_permission(self, repo_group, user): |
|
614 | 614 | """ |
|
615 | 615 | Revoke permission for user on given repository group |
|
616 | 616 | |
|
617 | 617 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
618 | 618 | or repositories_group name |
|
619 | 619 | :param user: Instance of User, user_id or username |
|
620 | 620 | """ |
|
621 | 621 | |
|
622 | 622 | repo_group = self._get_repo_group(repo_group) |
|
623 | 623 | user = self._get_user(user) |
|
624 | 624 | |
|
625 | 625 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
626 | 626 | .filter(UserRepoGroupToPerm.user == user)\ |
|
627 | 627 | .filter(UserRepoGroupToPerm.group == repo_group)\ |
|
628 | 628 | .scalar() |
|
629 | 629 | if obj: |
|
630 | 630 | self.sa.delete(obj) |
|
631 | 631 | log.debug('Revoked perm on %s on %s', repo_group, user) |
|
632 | 632 | action_logger_generic( |
|
633 | 633 | 'revoked permission from user: {} on repogroup: {}'.format( |
|
634 | 634 | user, repo_group), namespace='security.repogroup') |
|
635 | 635 | |
|
636 | 636 | def grant_user_group_permission(self, repo_group, group_name, perm): |
|
637 | 637 | """ |
|
638 | 638 | Grant permission for user group on given repository group, or update |
|
639 | 639 | existing one if found |
|
640 | 640 | |
|
641 | 641 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
642 | 642 | or repositories_group name |
|
643 | 643 | :param group_name: Instance of UserGroup, users_group_id, |
|
644 | 644 | or user group name |
|
645 | 645 | :param perm: Instance of Permission, or permission_name |
|
646 | 646 | """ |
|
647 | 647 | repo_group = self._get_repo_group(repo_group) |
|
648 | 648 | group_name = self._get_user_group(group_name) |
|
649 | 649 | permission = self._get_perm(perm) |
|
650 | 650 | |
|
651 | 651 | # check if we have that permission already |
|
652 | 652 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ |
|
653 | 653 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ |
|
654 | 654 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ |
|
655 | 655 | .scalar() |
|
656 | 656 | |
|
657 | 657 | if obj is None: |
|
658 | 658 | # create new |
|
659 | 659 | obj = UserGroupRepoGroupToPerm() |
|
660 | 660 | |
|
661 | 661 | obj.group = repo_group |
|
662 | 662 | obj.users_group = group_name |
|
663 | 663 | obj.permission = permission |
|
664 | 664 | self.sa.add(obj) |
|
665 | 665 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group) |
|
666 | 666 | action_logger_generic( |
|
667 | 667 | 'granted permission: {} to usergroup: {} on repogroup: {}'.format( |
|
668 | 668 | perm, group_name, repo_group), namespace='security.repogroup') |
|
669 | 669 | return obj |
|
670 | 670 | |
|
671 | 671 | def revoke_user_group_permission(self, repo_group, group_name): |
|
672 | 672 | """ |
|
673 | 673 | Revoke permission for user group on given repository group |
|
674 | 674 | |
|
675 | 675 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
676 | 676 | or repositories_group name |
|
677 | 677 | :param group_name: Instance of UserGroup, users_group_id, |
|
678 | 678 | or user group name |
|
679 | 679 | """ |
|
680 | 680 | repo_group = self._get_repo_group(repo_group) |
|
681 | 681 | group_name = self._get_user_group(group_name) |
|
682 | 682 | |
|
683 | 683 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ |
|
684 | 684 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ |
|
685 | 685 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ |
|
686 | 686 | .scalar() |
|
687 | 687 | if obj: |
|
688 | 688 | self.sa.delete(obj) |
|
689 | 689 | log.debug('Revoked perm to %s on %s', repo_group, group_name) |
|
690 | 690 | action_logger_generic( |
|
691 | 691 | 'revoked permission from usergroup: {} on repogroup: {}'.format( |
|
692 | 692 | group_name, repo_group), namespace='security.repogroup') |
|
693 | 693 | |
|
694 | 694 | @classmethod |
|
695 | 695 | def update_commit_cache(cls, repo_groups=None): |
|
696 | 696 | if not repo_groups: |
|
697 | 697 | repo_groups = RepoGroup.getAll() |
|
698 | 698 | for repo_group in repo_groups: |
|
699 | 699 | repo_group.update_commit_cache() |
|
700 | 700 | |
|
701 | 701 | |
|
702 | 702 | |
|
703 | 703 | def get_repo_groups_as_dict(self, repo_group_list=None, admin=False, |
|
704 | 704 | super_user_actions=False): |
|
705 | 705 | |
|
706 | 706 | from pyramid.threadlocal import get_current_request |
|
707 | 707 | _render = get_current_request().get_partial_renderer( |
|
708 | 708 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
709 | 709 | c = _render.get_call_context() |
|
710 | 710 | h = _render.get_helpers() |
|
711 | 711 | |
|
712 | 712 | def quick_menu(repo_group_name): |
|
713 | 713 | return _render('quick_repo_group_menu', repo_group_name) |
|
714 | 714 | |
|
715 | 715 | def repo_group_lnk(repo_group_name): |
|
716 | 716 | return _render('repo_group_name', repo_group_name) |
|
717 | 717 | |
|
718 | 718 | def last_change(last_change): |
|
719 | 719 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
720 | 720 | ts = time.time() |
|
721 | 721 | utc_offset = (datetime.datetime.fromtimestamp(ts) |
|
722 | 722 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() |
|
723 | 723 | last_change = last_change + datetime.timedelta(seconds=utc_offset) |
|
724 | 724 | return _render("last_change", last_change) |
|
725 | 725 | |
|
726 | 726 | def desc(desc, personal): |
|
727 | 727 | return _render( |
|
728 | 728 | 'repo_group_desc', desc, personal, c.visual.stylify_metatags) |
|
729 | 729 | |
|
730 | 730 | def repo_group_actions(repo_group_id, repo_group_name, gr_count): |
|
731 | 731 | return _render( |
|
732 | 732 | 'repo_group_actions', repo_group_id, repo_group_name, gr_count) |
|
733 | 733 | |
|
734 | 734 | def repo_group_name(repo_group_name, children_groups): |
|
735 | 735 | return _render("repo_group_name", repo_group_name, children_groups) |
|
736 | 736 | |
|
737 | 737 | def user_profile(username): |
|
738 | 738 | return _render('user_profile', username) |
|
739 | 739 | |
|
740 | 740 | repo_group_data = [] |
|
741 | 741 | for group in repo_group_list: |
|
742 | 742 | # NOTE(marcink): because we use only raw column we need to load it like that |
|
743 | 743 | changeset_cache = RepoGroup._load_changeset_cache( |
|
744 | 744 | '', group._changeset_cache) |
|
745 | 745 | last_commit_change = RepoGroup._load_commit_change(changeset_cache) |
|
746 | 746 | row = { |
|
747 | 747 | "menu": quick_menu(group.group_name), |
|
748 | 748 | "name": repo_group_lnk(group.group_name), |
|
749 | 749 | "name_raw": group.group_name, |
|
750 | 750 | |
|
751 | 751 | "last_change": last_change(last_commit_change), |
|
752 | 752 | "last_change_raw": datetime_to_time(last_commit_change), |
|
753 | 753 | |
|
754 | 754 | "last_changeset": "", |
|
755 | 755 | "last_changeset_raw": "", |
|
756 | 756 | |
|
757 | 757 | "desc": desc(group.group_description, group.personal), |
|
758 | 758 | "top_level_repos": 0, |
|
759 | 759 | "owner": user_profile(group.User.username) |
|
760 | 760 | } |
|
761 | 761 | if admin: |
|
762 | 762 | repo_count = group.repositories.count() |
|
763 | 763 | children_groups = map( |
|
764 | 764 | h.safe_unicode, |
|
765 | 765 | itertools.chain((g.name for g in group.parents), |
|
766 | 766 | (x.name for x in [group]))) |
|
767 | 767 | row.update({ |
|
768 | 768 | "action": repo_group_actions( |
|
769 | 769 | group.group_id, group.group_name, repo_count), |
|
770 | 770 | "top_level_repos": repo_count, |
|
771 | 771 | "name": repo_group_name(group.group_name, children_groups), |
|
772 | 772 | |
|
773 | 773 | }) |
|
774 | 774 | repo_group_data.append(row) |
|
775 | 775 | |
|
776 | 776 | return repo_group_data |
|
777 | 777 | |
|
778 | def get_repo_groups_data_table( | |
|
779 | self, draw, start, limit, | |
|
780 | search_q, order_by, order_dir, | |
|
781 | auth_user, repo_group_id): | |
|
782 | from rhodecode.model.scm import RepoGroupList | |
|
783 | ||
|
784 | _perms = ['group.read', 'group.write', 'group.admin'] | |
|
785 | repo_groups = RepoGroup.query() \ | |
|
786 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |
|
787 | .all() | |
|
788 | auth_repo_group_list = RepoGroupList( | |
|
789 | repo_groups, perm_set=_perms, | |
|
790 | extra_kwargs=dict(user=auth_user)) | |
|
791 | ||
|
792 | allowed_ids = [-1] | |
|
793 | for repo_group in auth_repo_group_list: | |
|
794 | allowed_ids.append(repo_group.group_id) | |
|
795 | ||
|
796 | repo_groups_data_total_count = RepoGroup.query() \ | |
|
797 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |
|
798 | .filter(or_( | |
|
799 | # generate multiple IN to fix limitation problems | |
|
800 | *in_filter_generator(RepoGroup.group_id, allowed_ids)) | |
|
801 | ) \ | |
|
802 | .count() | |
|
803 | ||
|
804 | base_q = Session.query( | |
|
805 | RepoGroup.group_name, | |
|
806 | RepoGroup.group_name_hash, | |
|
807 | RepoGroup.group_description, | |
|
808 | RepoGroup.group_id, | |
|
809 | RepoGroup.personal, | |
|
810 | RepoGroup.updated_on, | |
|
811 | RepoGroup._changeset_cache, | |
|
812 | User, | |
|
813 | ) \ | |
|
814 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |
|
815 | .filter(or_( | |
|
816 | # generate multiple IN to fix limitation problems | |
|
817 | *in_filter_generator(RepoGroup.group_id, allowed_ids)) | |
|
818 | ) \ | |
|
819 | .join(User, User.user_id == RepoGroup.user_id) \ | |
|
820 | .group_by(RepoGroup, User) | |
|
821 | ||
|
822 | repo_groups_data_total_filtered_count = base_q.count() | |
|
823 | ||
|
824 | sort_defined = False | |
|
825 | ||
|
826 | if order_by == 'group_name': | |
|
827 | sort_col = func.lower(RepoGroup.group_name) | |
|
828 | sort_defined = True | |
|
829 | elif order_by == 'user_username': | |
|
830 | sort_col = User.username | |
|
831 | else: | |
|
832 | sort_col = getattr(RepoGroup, order_by, None) | |
|
833 | ||
|
834 | if sort_defined or sort_col: | |
|
835 | if order_dir == 'asc': | |
|
836 | sort_col = sort_col.asc() | |
|
837 | else: | |
|
838 | sort_col = sort_col.desc() | |
|
839 | ||
|
840 | base_q = base_q.order_by(sort_col) | |
|
841 | base_q = base_q.offset(start).limit(limit) | |
|
842 | ||
|
843 | repo_group_list = base_q.all() | |
|
844 | ||
|
845 | repo_groups_data = RepoGroupModel().get_repo_groups_as_dict( | |
|
846 | repo_group_list=repo_group_list, admin=False) | |
|
847 | ||
|
848 | data = ({ | |
|
849 | 'draw': draw, | |
|
850 | 'data': repo_groups_data, | |
|
851 | 'recordsTotal': repo_groups_data_total_count, | |
|
852 | 'recordsFiltered': repo_groups_data_total_filtered_count, | |
|
853 | }) | |
|
854 | return data | |
|
855 | ||
|
778 | 856 | def _get_defaults(self, repo_group_name): |
|
779 | 857 | repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
780 | 858 | |
|
781 | 859 | if repo_group is None: |
|
782 | 860 | return None |
|
783 | 861 | |
|
784 | 862 | defaults = repo_group.get_dict() |
|
785 | 863 | defaults['repo_group_name'] = repo_group.name |
|
786 | 864 | defaults['repo_group_description'] = repo_group.group_description |
|
787 | 865 | defaults['repo_group_enable_locking'] = repo_group.enable_locking |
|
788 | 866 | |
|
789 | 867 | # we use -1 as this is how in HTML, we mark an empty group |
|
790 | 868 | defaults['repo_group'] = defaults['group_parent_id'] or -1 |
|
791 | 869 | |
|
792 | 870 | # fill owner |
|
793 | 871 | if repo_group.user: |
|
794 | 872 | defaults.update({'user': repo_group.user.username}) |
|
795 | 873 | else: |
|
796 | 874 | replacement_user = User.get_first_super_admin().username |
|
797 | 875 | defaults.update({'user': replacement_user}) |
|
798 | 876 | |
|
799 | 877 | return defaults |
@@ -1,390 +1,392 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
16 | 16 | pyroutes.register('robots', '/robots.txt', []); |
|
17 | 17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
18 | 18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
19 | 19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
20 | 20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
21 | 21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
22 | 22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
23 | 23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
24 | 24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
25 | 25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
26 | 26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
27 | 27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
28 | 28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
29 | 29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
30 | 30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
31 | 31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
32 | 32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
33 | 33 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); |
|
34 | 34 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); |
|
35 | 35 | pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']); |
|
36 | 36 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); |
|
37 | 37 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
38 | 38 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
39 | 39 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
40 | 40 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
41 | 41 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
42 | 42 | pyroutes.register('admin_home', '/_admin', []); |
|
43 | 43 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
44 | 44 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
45 | 45 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
46 | 46 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
47 | 47 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
48 | 48 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
49 | 49 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
50 | 50 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
51 | 51 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
52 | 52 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
53 | 53 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); |
|
54 | 54 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
55 | 55 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
56 | 56 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
57 | 57 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
58 | 58 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
59 | 59 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
60 | 60 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
61 | 61 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
62 | 62 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
63 | 63 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
64 | 64 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
65 | 65 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
66 | 66 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
67 | 67 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
68 | 68 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
69 | 69 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
70 | 70 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
71 | 71 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
72 | 72 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
73 | 73 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
74 | 74 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
75 | 75 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
76 | 76 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
77 | 77 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
78 | 78 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
79 | 79 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
80 | 80 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
81 | 81 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
82 | 82 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
83 | 83 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
84 | 84 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
85 | 85 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
86 | 86 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
87 | 87 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
88 | 88 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
89 | 89 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
90 | 90 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
91 | 91 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
92 | 92 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
93 | 93 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
94 | 94 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
95 | 95 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
96 | 96 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
97 | 97 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
98 | 98 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
99 | 99 | pyroutes.register('users', '/_admin/users', []); |
|
100 | 100 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
101 | 101 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
102 | 102 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
103 | 103 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
104 | 104 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
105 | 105 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
106 | 106 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
107 | 107 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
108 | 108 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
109 | 109 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']); |
|
110 | 110 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']); |
|
111 | 111 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
112 | 112 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
113 | 113 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
114 | 114 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
115 | 115 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
116 | 116 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
117 | 117 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
118 | 118 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
119 | 119 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
120 | 120 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
121 | 121 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
122 | 122 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
123 | 123 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
124 | 124 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
125 | 125 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
126 | 126 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
127 | 127 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
128 | 128 | pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']); |
|
129 | 129 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); |
|
130 | 130 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); |
|
131 | 131 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
132 | 132 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
133 | 133 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
134 | 134 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
135 | 135 | pyroutes.register('repos', '/_admin/repos', []); |
|
136 | 136 | pyroutes.register('repos_data', '/_admin/repos_data', []); |
|
137 | 137 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
138 | 138 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
139 | 139 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
140 | 140 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []); |
|
141 | 141 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
142 | 142 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
143 | 143 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
144 | 144 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
145 | 145 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
146 | 146 | pyroutes.register('upload_file', '/_file_store/upload', []); |
|
147 | 147 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); |
|
148 | 148 | pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']); |
|
149 | 149 | pyroutes.register('logout', '/_admin/logout', []); |
|
150 | 150 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
151 | 151 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
152 | 152 | pyroutes.register('home', '/', []); |
|
153 | pyroutes.register('main_page_repos_data', '/_home_repos', []); | |
|
154 | pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []); | |
|
153 | 155 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
154 | 156 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
155 | 157 | pyroutes.register('repo_list_data', '/_repos', []); |
|
156 | 158 | pyroutes.register('repo_group_list_data', '/_repo_groups', []); |
|
157 | 159 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
158 | 160 | pyroutes.register('markup_preview', '/_markup_preview', []); |
|
159 | 161 | pyroutes.register('file_preview', '/_file_preview', []); |
|
160 | 162 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); |
|
161 | 163 | pyroutes.register('journal', '/_admin/journal', []); |
|
162 | 164 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
163 | 165 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
164 | 166 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
165 | 167 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
166 | 168 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
167 | 169 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
168 | 170 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
169 | 171 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
170 | 172 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
171 | 173 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
172 | 174 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
173 | 175 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
174 | 176 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
175 | 177 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
176 | 178 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
177 | 179 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
178 | 180 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
179 | 181 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
180 | 182 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
181 | 183 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
182 | 184 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
183 | 185 | pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); |
|
184 | 186 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
185 | 187 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
186 | 188 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
187 | 189 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
188 | 190 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
189 | 191 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
190 | 192 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
191 | 193 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
192 | 194 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
193 | 195 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
194 | 196 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
195 | 197 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
196 | 198 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
197 | 199 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
198 | 200 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
199 | 201 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
200 | 202 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
201 | 203 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
202 | 204 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
203 | 205 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
204 | 206 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
205 | 207 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
206 | 208 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
207 | 209 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
208 | 210 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
209 | 211 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
210 | 212 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
211 | 213 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
212 | 214 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
213 | 215 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']); |
|
214 | 216 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
215 | 217 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']); |
|
216 | 218 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
217 | 219 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
218 | 220 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
219 | 221 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
220 | 222 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
221 | 223 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
222 | 224 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
223 | 225 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
224 | 226 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
225 | 227 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
226 | 228 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
227 | 229 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
228 | 230 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
229 | 231 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
230 | 232 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
231 | 233 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
232 | 234 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); |
|
233 | 235 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
234 | 236 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
235 | 237 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
236 | 238 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
237 | 239 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
238 | 240 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
239 | 241 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
240 | 242 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
241 | 243 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
242 | 244 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); |
|
243 | 245 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
244 | 246 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
245 | 247 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
246 | 248 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
247 | 249 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); |
|
248 | 250 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
249 | 251 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
250 | 252 | pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']); |
|
251 | 253 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
252 | 254 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
253 | 255 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
254 | 256 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
255 | 257 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
256 | 258 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); |
|
257 | 259 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
258 | 260 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
259 | 261 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
260 | 262 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
261 | 263 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
262 | 264 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
263 | 265 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
264 | 266 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
265 | 267 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
266 | 268 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
267 | 269 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
268 | 270 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
269 | 271 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
270 | 272 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
271 | 273 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
272 | 274 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
273 | 275 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
274 | 276 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']); |
|
275 | 277 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']); |
|
276 | 278 | pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
277 | 279 | pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
278 | 280 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
279 | 281 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
280 | 282 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
281 | 283 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
282 | 284 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
283 | 285 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
284 | 286 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
285 | 287 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
286 | 288 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
287 | 289 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
288 | 290 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
289 | 291 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
290 | 292 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
291 | 293 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
292 | 294 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
293 | 295 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
294 | 296 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
295 | 297 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
296 | 298 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
297 | 299 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
298 | 300 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
299 | 301 | pyroutes.register('search', '/_admin/search', []); |
|
300 | 302 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']); |
|
301 | 303 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']); |
|
302 | 304 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']); |
|
303 | 305 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
304 | 306 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); |
|
305 | 307 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
306 | 308 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
307 | 309 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
308 | 310 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
309 | 311 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
310 | 312 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
311 | 313 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
312 | 314 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
313 | 315 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
314 | 316 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
315 | 317 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
316 | 318 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
317 | 319 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
318 | 320 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
319 | 321 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
320 | 322 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
321 | 323 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []); |
|
322 | 324 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []); |
|
323 | 325 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); |
|
324 | 326 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
325 | 327 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
326 | 328 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
327 | 329 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
328 | 330 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
329 | 331 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
330 | 332 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
331 | 333 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
332 | 334 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
333 | 335 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
334 | 336 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
335 | 337 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
336 | 338 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
337 | 339 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
338 | 340 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
339 | 341 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
340 | 342 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
341 | 343 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
342 | 344 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
343 | 345 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
344 | 346 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
345 | 347 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
346 | 348 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
347 | 349 | pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); |
|
348 | 350 | pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); |
|
349 | 351 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
350 | 352 | pyroutes.register('apiv2', '/_admin/api', []); |
|
351 | 353 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); |
|
352 | 354 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); |
|
353 | 355 | pyroutes.register('login', '/_admin/login', []); |
|
354 | 356 | pyroutes.register('register', '/_admin/register', []); |
|
355 | 357 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); |
|
356 | 358 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); |
|
357 | 359 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
358 | 360 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); |
|
359 | 361 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
360 | 362 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
361 | 363 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []); |
|
362 | 364 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []); |
|
363 | 365 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []); |
|
364 | 366 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []); |
|
365 | 367 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']); |
|
366 | 368 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']); |
|
367 | 369 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']); |
|
368 | 370 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']); |
|
369 | 371 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []); |
|
370 | 372 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']); |
|
371 | 373 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); |
|
372 | 374 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); |
|
373 | 375 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
374 | 376 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
375 | 377 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); |
|
376 | 378 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); |
|
377 | 379 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); |
|
378 | 380 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); |
|
379 | 381 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); |
|
380 | 382 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); |
|
381 | 383 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); |
|
382 | 384 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); |
|
383 | 385 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); |
|
384 | 386 | pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); |
|
385 | 387 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); |
|
386 | 388 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); |
|
387 | 389 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); |
|
388 | 390 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); |
|
389 | 391 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
390 | 392 | } |
@@ -1,121 +1,221 b'' | |||
|
1 | 1 | <%inherit file="/base/base.mako"/> |
|
2 | 2 | |
|
3 | 3 | |
|
4 | 4 | <%def name="menu_bar_subnav()"> |
|
5 | 5 | % if c.repo_group: |
|
6 | 6 | ${self.repo_group_menu(active='home')} |
|
7 | 7 | % endif |
|
8 | 8 | </%def> |
|
9 | 9 | |
|
10 | 10 | |
|
11 | 11 | <%def name="main()"> |
|
12 | 12 | <div class="box"> |
|
13 | 13 | <!-- box / title --> |
|
14 | 14 | <div class="title"> |
|
15 | 15 | |
|
16 | 16 | </div> |
|
17 | 17 | <!-- end box / title --> |
|
18 | <div class="table"> | |
|
19 | <div id="groups_list_wrap"> | |
|
20 | <table id="group_list_table" class="display" style="width: 100%"></table> | |
|
21 | </div> | |
|
22 | </div> | |
|
23 | ||
|
24 | <div class="table"> | |
|
25 | <div id="repos_list_wrap"> | |
|
26 | <table id="repo_list_table" class="display" style="width: 100%"></table> | |
|
27 | </div> | |
|
28 | </div> | |
|
29 | ||
|
30 | ## no repository groups and repos present, show something to the users | |
|
31 | % if c.repo_groups_data == '[]' and c.repos_data == '[]': | |
|
32 | <div class="table"> | |
|
18 | <div id="no_grid_data" class="table" style="display: none"> | |
|
33 | 19 | <h2 class="no-object-border"> |
|
34 | 20 | ${_('No repositories or repositories groups exists here.')} |
|
35 | 21 | </h2> |
|
36 | 22 | </div> |
|
37 | % endif | |
|
23 | ||
|
24 | <div class="table"> | |
|
25 | <div id="groups_list_wrap" style="min-height: 200px;"> | |
|
26 | <table id="group_list_table" class="display" style="width: 100%;"></table> | |
|
27 | </div> | |
|
28 | </div> | |
|
29 | ||
|
30 | <div class="table"> | |
|
31 | <div id="repos_list_wrap" style="min-height: 200px;"> | |
|
32 | <table id="repo_list_table" class="display" style="width: 100%;"></table> | |
|
33 | </div> | |
|
34 | </div> | |
|
38 | 35 | |
|
39 | 36 | </div> |
|
40 | 37 | <script> |
|
41 |
|
|
|
38 | $(document).ready(function () { | |
|
42 | 39 | |
|
43 | 40 | // repo group list |
|
44 | % if c.repo_groups_data != '[]': | |
|
45 | $('#group_list_table').DataTable({ | |
|
46 | data: ${c.repo_groups_data|n}, | |
|
47 |
|
|
|
48 | pageLength: ${c.visual.dashboard_items}, | |
|
49 | order: [[ 0, "asc" ]], | |
|
50 | columns: [ | |
|
51 |
|
|
|
52 | "sort": "name_raw"}, title: "${_('Name')}", className: "truncate-wrap td-grid-name" }, | |
|
53 | { data: 'menu', "bSortable": false, className: "quick_repo_menu" }, | |
|
54 | { data: {"_": "desc", | |
|
55 | "sort": "desc"}, title: "${_('Description')}", className: "td-description" }, | |
|
56 | { data: {"_": "last_change", | |
|
57 | "sort": "last_change_raw", | |
|
58 | "type": Number}, title: "${_('Last Change')}", className: "td-time" }, | |
|
59 | { data: {"_": "last_changeset", | |
|
60 | "sort": "last_changeset_raw", | |
|
61 | "type": Number}, title: "", className: "td-hash" }, | |
|
62 |
|
|
|
63 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" } | |
|
64 | ], | |
|
65 | language: { | |
|
66 | paginate: DEFAULT_GRID_PAGINATION, | |
|
67 | emptyTable: _gettext("No repository groups available yet.") | |
|
68 |
|
|
|
69 | "drawCallback": function( settings, json ) { | |
|
70 | timeagoActivate(); | |
|
71 | tooltipActivate(); | |
|
72 | quick_repo_menu(); | |
|
73 | // hide pagination for single page | |
|
74 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
|
75 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
|
76 | } | |
|
77 | } | |
|
41 | var $groupListTable = $('#group_list_table'); | |
|
42 | ||
|
43 | $groupListTable.DataTable({ | |
|
44 | processing: true, | |
|
45 | serverSide: true, | |
|
46 | ajax: { | |
|
47 | "url": "${h.route_path('main_page_repo_groups_data')}", | |
|
48 | "data": function (d) { | |
|
49 | % if c.repo_group: | |
|
50 | d.repo_group_id = ${c.repo_group.group_id} | |
|
51 | % endif | |
|
52 | } | |
|
53 | }, | |
|
54 | dom: 'rtp', | |
|
55 | pageLength: ${c.visual.dashboard_items}, | |
|
56 | order: [[0, "asc"]], | |
|
57 | columns: [ | |
|
58 | { | |
|
59 | data: { | |
|
60 | "_": "name", | |
|
61 | "sort": "name_raw" | |
|
62 | }, title: "${_('Name')}", className: "truncate-wrap td-grid-name" | |
|
63 | }, | |
|
64 | {data: 'menu', "bSortable": false, className: "quick_repo_menu"}, | |
|
65 | { | |
|
66 | data: { | |
|
67 | "_": "desc", | |
|
68 | "sort": "desc" | |
|
69 | }, title: "${_('Description')}", className: "td-description" | |
|
70 | }, | |
|
71 | { | |
|
72 | data: { | |
|
73 | "_": "last_change", | |
|
74 | "sort": "last_change_raw", | |
|
75 | "type": Number | |
|
76 | }, title: "${_('Last Change')}", className: "td-time" | |
|
77 | }, | |
|
78 | { | |
|
79 | data: { | |
|
80 | "_": "last_changeset", | |
|
81 | "sort": "last_changeset_raw", | |
|
82 | "type": Number | |
|
83 | }, title: "", className: "td-hash" | |
|
84 | }, | |
|
85 | { | |
|
86 | data: { | |
|
87 | "_": "owner", | |
|
88 | "sort": "owner" | |
|
89 | }, title: "${_('Owner')}", className: "td-user" | |
|
90 | } | |
|
91 | ], | |
|
92 | language: { | |
|
93 | paginate: DEFAULT_GRID_PAGINATION, | |
|
94 | sProcessing: _gettext('loading...'), | |
|
95 | emptyTable: _gettext("No repository groups present.") | |
|
96 | }, | |
|
97 | "drawCallback": function (settings, json) { | |
|
98 | // hide grid if it's empty | |
|
99 | if (settings.fnRecordsDisplay() === 0) { | |
|
100 | $('#groups_list_wrap').hide(); | |
|
101 | // both hidden, show no-data | |
|
102 | if ($('#repos_list_wrap').is(':hidden')) { | |
|
103 | $('#no_grid_data').show(); | |
|
104 | } | |
|
105 | } else { | |
|
106 | $('#groups_list_wrap').show(); | |
|
107 | } | |
|
108 | ||
|
109 | timeagoActivate(); | |
|
110 | tooltipActivate(); | |
|
111 | quick_repo_menu(); | |
|
112 | // hide pagination for single page | |
|
113 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
|
114 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
|
115 | } | |
|
116 | ||
|
117 | }, | |
|
78 | 118 | }); |
|
79 | % endif | |
|
119 | ||
|
120 | $groupListTable.on('xhr.dt', function (e, settings, json, xhr) { | |
|
121 | $groupListTable.css('opacity', 1); | |
|
122 | }); | |
|
123 | ||
|
124 | $groupListTable.on('preXhr.dt', function (e, settings, data) { | |
|
125 | $groupListTable.css('opacity', 0.3); | |
|
126 | }); | |
|
127 | ||
|
80 | 128 | |
|
81 | // repo list | |
|
82 | % if c.repos_data != '[]': | |
|
83 | $('#repo_list_table').DataTable({ | |
|
84 | data: ${c.repos_data|n}, | |
|
85 |
|
|
|
86 | order: [[ 0, "asc" ]], | |
|
87 | pageLength: ${c.visual.dashboard_items}, | |
|
88 | columns: [ | |
|
89 |
|
|
|
90 | "sort": "name_raw"}, title: "${_('Name')}", className: "truncate-wrap td-grid-name" }, | |
|
91 | { data: 'menu', "bSortable": false, className: "quick_repo_menu" }, | |
|
92 | { data: {"_": "desc", | |
|
93 | "sort": "desc"}, title: "${_('Description')}", className: "td-description" }, | |
|
94 | { data: {"_": "last_change", | |
|
95 | "sort": "last_change_raw", | |
|
96 | "type": Number}, title: "${_('Last Change')}", className: "td-time" }, | |
|
97 | { data: {"_": "last_changeset", | |
|
98 | "sort": "last_changeset_raw", | |
|
99 | "type": Number}, title: "${_('Commit')}", className: "td-hash" }, | |
|
100 |
|
|
|
101 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" } | |
|
102 | ], | |
|
103 | language: { | |
|
104 | paginate: DEFAULT_GRID_PAGINATION, | |
|
105 | emptyTable: _gettext("No repositories available yet.") | |
|
106 | }, | |
|
107 | "drawCallback": function( settings, json ) { | |
|
108 | timeagoActivate(); | |
|
109 | tooltipActivate(); | |
|
110 | quick_repo_menu(); | |
|
111 | // hide pagination for single page | |
|
112 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
|
113 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
|
114 |
|
|
|
115 | } | |
|
129 | ## // repo list | |
|
130 | var $repoListTable = $('#repo_list_table'); | |
|
131 | ||
|
132 | $repoListTable.DataTable({ | |
|
133 | processing: true, | |
|
134 | serverSide: true, | |
|
135 | ajax: { | |
|
136 | "url": "${h.route_path('main_page_repos_data')}", | |
|
137 | "data": function (d) { | |
|
138 | % if c.repo_group: | |
|
139 | d.repo_group_id = ${c.repo_group.group_id} | |
|
140 | % endif | |
|
141 | } | |
|
142 | }, | |
|
143 | order: [[0, "asc"]], | |
|
144 | dom: 'rtp', | |
|
145 | pageLength: ${c.visual.dashboard_items}, | |
|
146 | columns: [ | |
|
147 | { | |
|
148 | data: { | |
|
149 | "_": "name", | |
|
150 | "sort": "name_raw" | |
|
151 | }, title: "${_('Name')}", className: "truncate-wrap td-grid-name" | |
|
152 | }, | |
|
153 | { | |
|
154 | data: 'menu', "bSortable": false, className: "quick_repo_menu" | |
|
155 | }, | |
|
156 | { | |
|
157 | data: { | |
|
158 | "_": "desc", | |
|
159 | "sort": "desc" | |
|
160 | }, title: "${_('Description')}", className: "td-description" | |
|
161 | }, | |
|
162 | { | |
|
163 | data: { | |
|
164 | "_": "last_change", | |
|
165 | "sort": "last_change_raw", | |
|
166 | "type": Number | |
|
167 | }, title: "${_('Last Change')}", className: "td-time", orderable: false | |
|
168 | }, | |
|
169 | { | |
|
170 | data: { | |
|
171 | "_": "last_changeset", | |
|
172 | "sort": "last_changeset_raw", | |
|
173 | "type": Number | |
|
174 | }, title: "${_('Commit')}", className: "td-hash" | |
|
175 | }, | |
|
176 | { | |
|
177 | data: { | |
|
178 | "_": "owner", | |
|
179 | "sort": "owner" | |
|
180 | }, title: "${_('Owner')}", className: "td-user" | |
|
181 | } | |
|
182 | ], | |
|
183 | language: { | |
|
184 | paginate: DEFAULT_GRID_PAGINATION, | |
|
185 | sProcessing: _gettext('loading...'), | |
|
186 | emptyTable: _gettext("No repositories present.") | |
|
187 | }, | |
|
188 | "drawCallback": function (settings, json) { | |
|
189 | // hide grid if it's empty | |
|
190 | if (settings.fnRecordsDisplay() == 0) { | |
|
191 | $('#repos_list_wrap').hide() | |
|
192 | // both hidden, show no-data | |
|
193 | if ($('#groups_list_wrap').is(':hidden')) { | |
|
194 | $('#no_grid_data').show() | |
|
195 | } | |
|
196 | } else { | |
|
197 | $('#repos_list_wrap').show() | |
|
198 | } | |
|
199 | ||
|
200 | timeagoActivate(); | |
|
201 | tooltipActivate(); | |
|
202 | quick_repo_menu(); | |
|
203 | // hide pagination for single page | |
|
204 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
|
205 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
|
206 | } | |
|
207 | ||
|
208 | }, | |
|
116 | 209 | }); |
|
117 | % endif | |
|
210 | ||
|
211 | $repoListTable.on('xhr.dt', function (e, settings, json, xhr) { | |
|
212 | $repoListTable.css('opacity', 1); | |
|
213 | }); | |
|
118 | 214 | |
|
119 | }); | |
|
215 | $repoListTable.on('preXhr.dt', function (e, settings, data) { | |
|
216 | $repoListTable.css('opacity', 0.3); | |
|
217 | }); | |
|
218 | ||
|
219 | }); | |
|
120 | 220 | </script> |
|
121 | 221 | </%def> |
General Comments 0
You need to be logged in to leave comments.
Login now