##// END OF EJS Templates
repositories: added option to archive repositories instead of deleting them.
marcink -
r3090:bdd9dc16 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,36 b''
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    # Rebind the declarative base to the engine sqlalchemy-migrate hands us,
    # then import the frozen 4.13.0.0 schema snapshot so this migration
    # keeps working even after the live models change.
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_13_0_0 as db

    repository_table = db.Repository.__table__

    # New nullable flag backing the "archive instead of delete" feature;
    # Column.create() is sqlalchemy-migrate's ALTER TABLE ADD COLUMN helper.
    archived = Column('archived', Boolean(), nullable=True)
    archived.create(table=repository_table)

    # issue fixups
    fixups(db, meta.Session)
26
27
def downgrade(migrate_engine):
    # Intentional no-op: the 'archived' column is left in place on downgrade.
    # NOTE(review): the local `meta` below shadows the imported
    # rhodecode.model.meta module within this function.
    meta = MetaData()
    meta.bind = migrate_engine
31
32
def fixups(models, _SESSION):
    """Post-migration data fixups; nothing is required for this version."""
    return None
35
36
@@ -1,63 +1,63 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
import os
import sys
import platform

# Version tuple read from the adjacent VERSION file, e.g. ('4', '13', '0', ...)
VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

# Supported SCM backends and their display names
BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

# Dotted version string built from the first three VERSION components
__version__ = ('.'.join((str(each) for each in VERSION[:3])))
__dbversion__ = 91  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
@@ -1,652 +1,677 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26 26
27 27 from rhodecode.lib import helpers as h, diffs
28 28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 30 from rhodecode.model import repo
31 31 from rhodecode.model import repo_group
32 32 from rhodecode.model import user_group
33 33 from rhodecode.model import user
34 34 from rhodecode.model.db import User
35 35 from rhodecode.model.scm import ScmModel
36 36 from rhodecode.model.settings import VcsSettingsModel
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
ADMIN_PREFIX = '/_admin'
STATIC_FILE_PREFIX = '/_static'

# Regex fragments used to tighten pyramid route placeholders; consumed by
# add_route_requirements().
URL_NAME_REQUIREMENTS = {
    # group name can have a slash in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    'repo_group_name': r'.*?[^/]',
    # repo names can have a slash in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types
    'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
}
56 56
57 57
def add_route_with_slash(config, name, pattern, **kw):
    """Register *pattern* and, when it lacks one, a trailing-slash twin.

    The twin route is named ``<name>_slash`` so both URL forms resolve.
    """
    config.add_route(name, pattern, **kw)
    if pattern.endswith('/'):
        return
    config.add_route(name + '_slash', pattern + '/', **kw)
62 62
63 63
def add_route_requirements(route_path, requirements=None):
    """
    Adds regex requirements to pyramid routes using a mapping dict.

    Each ``{key}`` placeholder present in *requirements* becomes
    ``{key:regex}``; unknown placeholders are left untouched.

    e.g::
        add_route_requirements('{repo_name}/settings')

    :param route_path: pyramid route pattern string
    :param requirements: mapping of placeholder name -> regex; defaults to
        the module-level URL_NAME_REQUIREMENTS
    :return: the rewritten route pattern
    """
    # Resolve the default lazily instead of binding the module-level dict at
    # definition time; this also makes an explicit ``None`` behave like the
    # default rather than crashing on ``None.items()``.
    if requirements is None:
        requirements = URL_NAME_REQUIREMENTS
    for key, regex in requirements.items():
        route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
    return route_path
73 73
74 74
def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function"""
    # Subversion refs are rendered as path@revision; all others by name only.
    return _format_ref_id_svn if h.is_svn(repo) else _format_ref_id
81 81
82 82
83 83 def _format_ref_id(name, raw_id):
84 84 """Default formatting of a given reference `name`"""
85 85 return name
86 86
87 87
88 88 def _format_ref_id_svn(name, raw_id):
89 89 """Special way of formatting a reference for Subversion including path"""
90 90 return '%s@%s' % (name, raw_id)
91 91
92 92
class TemplateArgs(StrictAttributeDict):
    """Container for the template context ``c`` (attribute-style access)."""
    pass
95 95
96 96
class BaseAppView(object):
    """Base class for all pyramid views: wires up request, session and the
    authenticated user, and enforces pending forced password changes."""

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, 'user'):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably of web-app context, e.g API CALL/VCS CALL
            if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
                log.warning('Unable to process request `%s` in this scope', request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user)

    def _maybe_needs_password_change(self, view_name, user_obj):
        """Redirect to the password page when a forced change is pending.

        Skipped for login/logout and the password views themselves, for the
        anonymous default user, and when no force flag is set.
        """
        log.debug('Checking if user %s needs password change on view %s',
                  user_obj, view_name)
        skip_user_views = [
            'logout', 'login',
            'my_account_password', 'my_account_password_update'
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        # force_password_change holds a timestamp; the flag only takes
        # effect once that moment has passed
        should_change = user_obj.user_data.get('force_password_change')
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug('User %s requires password change', user_obj)
            h.flash('You are required to change your password', 'warning',
                    ignore_duplicate=True)

            if view_name not in skip_user_views:
                raise HTTPFound(
                    self.request.route_path('my_account_password'))

    def _log_creation_exception(self, e, repo_name):
        """Translate a repo-creation exception into a user-facing message."""
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == 'INVALID_CERTIFICATE':
            log.exception(
                'Exception creating a repository: invalid certificate')
            msg = (_('Error creating repository %s: invalid certificate')
                   % repo_name)
        else:
            log.exception("Exception creating a repository")
            msg = (_('Error creating repository %s')
                   % repo_name)
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """Build the base template context ``c`` with the auth user attached."""
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes
            attach_context_attributes(c, self.request, self.request.user.user_id)

        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        """Wrap template args into the dict shape the renderers expect."""

        local_tmpl_args = {
            'defaults': {},
            'errors': {},
            'c': tmpl_args
        }
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError('Needs implementation in view class')
191 191
192 192
class RepoAppView(BaseAppView):
    """Base view for repository scoped pages; expects ``request.db_repo`` to
    have been resolved by the ``repo_route`` predicate."""

    def __init__(self, context, request):
        super(RepoAppView, self).__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)

    def _handle_missing_requirements(self, error):
        # Called when the vcs backend cannot load the repo; only logs,
        # the caller decides on redirect/flash behaviour.
        log.error(
            'Requirements are missing for repository %s: %s',
            self.db_repo_name, error.message)

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """Extend the base context with repo-specific vars, the per-user path
        filter, and redirect handling for repos that cannot be loaded."""
        _ = self.request.translate
        c = super(RepoAppView, self)._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        # permissive default; replaced below when per-path ACLs are available
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            if self.rhodecode_vcs_repo:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username)
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {'error': str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            h.flash(_(
                "The repository `%(repo_name)s` cannot be loaded in filesystem. "
                "Please check if it exist, or is not damaged.") %
                {'repo_name': c.repo_name},
                category='error', ignore_duplicate=True)
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(('edit_repo', 'repo_summary')):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=self.db_repo_name))

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path('home'))

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get('f_path')
        if f_path:
            # fix for multiple initial slashes that causes errors for GIT
            return f_path.lstrip('/')

        return default

    def _get_f_path(self, matchdict, default=None):
        # permission-checked variant; may raise HTTPForbidden via the filter
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        """Read a single general vcs setting for *target_repo*."""
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)
271 271
272 272
class PathFilter(object):
    """Per-user path ACL helper used by repo views and diff rendering."""

    # Expects an instance of BasePathPermissionChecker or None
    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        """Return *path* unchanged, or raise HTTPForbidden on denied access."""
        if path and self.permission_checker and not self.permission_checker.has_access(path):
            raise HTTPForbidden()
        return path

    def filter_patchset(self, patchset):
        """Drop patches the user may not see.

        Returns ``(patchset, had_filtered)``; the input is passed through
        untouched when no checker is configured or nothing was hidden.
        """
        if not self.permission_checker or not patchset:
            return patchset, False
        had_filtered = False
        filtered_patchset = []
        for patch in patchset:
            filename = patch.get('filename', None)
            # patches without a filename are kept — nothing to check against
            if not filename or self.permission_checker.has_access(filename):
                filtered_patchset.append(patch)
            else:
                had_filtered = True
        if had_filtered:
            if isinstance(patchset, diffs.LimitedDiffContainer):
                # preserve the container type so diff-limit metadata survives
                filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
            return filtered_patchset, True
        else:
            return patchset, False

    def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
        """Render *patchset* through *diffset* after ACL filtering; the result
        carries ``has_hidden_changes`` so templates can hint at omissions."""
        filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
        result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
        result.has_hidden_changes = has_hidden_changes
        return result

    def get_raw_patch(self, diff_processor):
        # raw patches cannot be partially hidden, so they are disabled unless
        # the user has full access (or no checker is configured at all)
        if self.permission_checker is None:
            return diff_processor.as_raw()
        elif self.permission_checker.has_full_access:
            return diff_processor.as_raw()
        else:
            return '# Repository has user-specific filters, raw patch generation is disabled.'

    @property
    def is_enabled(self):
        """True when a permission checker is configured."""
        return self.permission_checker is not None
319 319
320 320
class RepoGroupAppView(BaseAppView):
    """Base view for repository-group scoped pages; expects
    ``request.db_repo_group`` to be resolved by the route predicate."""

    def __init__(self, context, request):
        super(RepoGroupAppView, self).__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _revoke_perms_on_yourself(self, form_result):
        """Return True when the submitted permission form would strip the
        acting user's own admin permission on this group.

        NOTE(review): indexes the results of ``filter`` directly, which relies
        on Python 2 semantics; under Python 3 this needs ``list(filter(...))``.
        """
        _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                          form_result['perm_updates'])
        _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                            form_result['perm_additions'])
        _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                            form_result['perm_deletions'])
        admin_perm = 'group.admin'
        if _updates and _updates[0][1] != admin_perm or \
                _additions and _additions[0][1] != admin_perm or \
                _deletions and _deletions[0][1] != admin_perm:
            return True
        return False
340 340
341 341
class UserGroupAppView(BaseAppView):
    """Base view for user-group scoped pages; expects ``request.db_user_group``
    to be resolved by the ``user_group_route`` predicate."""

    def __init__(self, context, request):
        super(UserGroupAppView, self).__init__(context, request)
        self.db_user_group = request.db_user_group
        self.db_user_group_name = self.db_user_group.users_group_name
347 347
348 348
class UserAppView(BaseAppView):
    """Base view for user scoped admin pages; expects ``request.db_user`` to
    be resolved by one of the user route predicates."""

    def __init__(self, context, request):
        super(UserAppView, self).__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        # routes registered without default-user support must not edit the
        # anonymous default user; bounce back to the users listing instead
        if not request.db_user_supports_default:
            if self.db_user.username == User.DEFAULT_USER:
                h.flash(_("Editing user `{}` is disabled.".format(
                    User.DEFAULT_USER)), category='warning')
                raise HTTPFound(h.route_path('users'))
361 361
362 362
class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        """Parse DataTables ordering params from the query string.

        :param column_map: translates datatable column names to DB columns
        :return: tuple ``(search_q, order_by, order_dir)``
        """
        column_map = column_map or {}
        column_index = safe_int(request.GET.get('order[0][column]'))
        order_dir = request.GET.get(
            'order[0][dir]', 'desc')
        order_by = request.GET.get(
            'columns[%s][data][sort]' % column_index, 'name_raw')

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get('search[value]')
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        """Parse DataTables paging params; defaults to start=0, length=25."""
        start = safe_int(request.GET.get('start'), 0)
        length = safe_int(request.GET.get('length'), 25)
        draw = safe_int(request.GET.get('draw'))
        return draw, start, length

    def _get_order_col(self, order_by, model):
        # string names are resolved as (possibly dotted) model attributes;
        # anything else is assumed to already be a column expression
        if isinstance(order_by, basestring):
            try:
                return operator.attrgetter(order_by)(model)
            except AttributeError:
                return None
        else:
            return order_by
396 396
397 397
class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """
    def load_default_context(self):
        c = self._get_local_tmpl_context()


        return c

    def load_refs_context(self, ref_items, partials_template):
        """Build datatable rows for ``(ref_name, commit_id)`` pairs.

        Each row carries both rendered cells (via *partials_template*) and
        raw values used for client-side sorting.
        """
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        # mercurial branches can be closed; used to mark those rows
        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load)
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            use_commit_id = '/' in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=commit_id)

            else:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=ref_name,
                    _query=dict(at=ref_name))

            data.append({
                "name": _render('name', ref_name, files_url, closed),
                "name_raw": ref_name,
                "date": _render('date', commit.date),
                "date_raw": datetime_to_time(commit.date),
                "author": _render('author', commit.author),
                "commit": _render(
                    'commit', commit.message, commit.raw_id, commit.idx),
                "commit_raw": commit.idx,
                "compare": _render(
                    'compare', format_ref_id(ref_name, commit.raw_id)),
            })

        return data
459 459
460 460
class RepoRoutePredicate(object):
    """Route predicate resolving ``repo_name`` (by name, then by numeric id)
    to a db object stored as ``request.db_repo``."""

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        repo_name = info['match']['repo_name']
        repo_model = repo.RepoModel()

        by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)

        def redirect_if_creating(route_info, db_repo):
            # repos still in PENDING state are only reachable through the
            # "creating" progress page (and the delete view, see below)
            skip_views = ['edit_repo_advanced_delete']
            route = route_info['route']
            # we should skip delete view so we can actually "remove" repositories
            # if they get stuck in creating state.
            if route.name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                repo_creating_url = request.route_path(
                    'repo_creating', repo_name=db_repo.repo_name)
                raise HTTPFound(repo_creating_url)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo = by_name_match
            redirect_if_creating(info, by_name_match)
            return True

        # fall back to addressing the repo by its numeric id
        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            request.db_repo = by_id_match
            redirect_if_creating(info, by_id_match)
            return True

        return False
506 506
507 507
class RepoForbidArchivedRoutePredicate(object):
    """Route predicate blocking views on archived repositories: flashes a
    warning and redirects to the repo summary page instead."""

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_forbid_archived = %s' % self.val

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        # relies on request.db_repo resolved earlier by RepoRoutePredicate
        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking if archived flag for repo for %s',
            self.__class__.__name__, rhodecode_db_repo.repo_name)

        if rhodecode_db_repo.archived:
            log.warning('Current view is not supported for archived repo:%s',
                        rhodecode_db_repo.repo_name)

            h.flash(
                h.literal(_('Action not supported for archived repository.')),
                category='warning')
            summary_url = request.route_path(
                'repo_summary', repo_name=rhodecode_db_repo.repo_name)
            raise HTTPFound(summary_url)
        return True
536
537
class RepoTypeRoutePredicate(object):
    """Route predicate restricting a route to the given repo backends;
    defaults to all of hg/git/svn when no value is supplied."""

    def __init__(self, val, config):
        self.val = val or ['hg', 'git', 'svn']

    def text(self):
        return 'repo_accepted_type = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        # relies on request.db_repo resolved earlier by RepoRoutePredicate
        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking repo type for %s in %s',
            self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)

        if rhodecode_db_repo.repo_type in self.val:
            return True
        else:
            log.warning('Current view is not supported for repo type:%s',
                        rhodecode_db_repo.repo_type)
            return False
541 564
542 565
class RepoGroupRoutePredicate(object):
    """Route predicate resolving ``repo_group_name`` to a db object.

    On success the group is cached on the request as ``request.db_repo_group``.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs-scoped calls never carry a web route match
        if hasattr(request, 'vcs_call'):
            return

        group_name = info['match']['repo_group_name']
        match = repo_group.RepoGroupModel().get_by_group_name(
            group_name, cache=False)
        if not match:
            return False

        # stash the resolved group for later re-use by views
        request.db_repo_group = match
        return True
567 590
568 591
class UserGroupRoutePredicate(object):
    """Route predicate resolving ``user_group_id`` to a db object.

    On success the group is cached on the request as ``request.db_user_group``.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'user_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs-scoped calls never carry a web route match
        if hasattr(request, 'vcs_call'):
            return

        group_id = info['match']['user_group_id']
        match = user_group.UserGroup().get(group_id, cache=False)
        if not match:
            return False

        # stash the resolved group for later re-use by views
        request.db_user_group = match
        return True
593 616
594 617
class UserRoutePredicateBase(object):
    """Shared machinery for user route predicates.

    Subclasses set ``supports_default`` to control whether the default
    (anonymous) user may be addressed through the route; the resolved user
    and that policy are cached on the request.
    """

    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        # vcs-scoped calls never carry a web route match
        if hasattr(request, 'vcs_call'):
            return

        uid = info['match']['user_id']
        match = user.User().get(uid, cache=False)
        if not match:
            return False

        # stash the resolved user and the default-user policy for views
        request.db_user = match
        request.db_user_supports_default = self.supports_default
        return True
620 643
621 644
class UserRoutePredicate(UserRoutePredicateBase):
    # regular user routes must not address the default (anonymous) user
    supports_default = False

    def text(self):
        return 'user_route = %s' % self.val

    phash = text
629 652
630 653
class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    # variant that also allows addressing the default (anonymous) user
    supports_default = True

    def text(self):
        return 'user_with_default_route = %s' % self.val

    phash = text
638 661
639 662
def includeme(config):
    """Register all custom route predicates with the pyramid configurator."""
    predicates = [
        ('repo_route', RepoRoutePredicate),
        ('repo_accepted_types', RepoTypeRoutePredicate),
        ('repo_forbid_when_archived', RepoForbidArchivedRoutePredicate),
        ('repo_group_route', RepoGroupRoutePredicate),
        ('user_group_route', UserGroupRoutePredicate),
        ('user_route_with_default', UserRouteWithDefaultPredicate),
        ('user_route', UserRoutePredicate),
    ]
    # registration order mirrors the original explicit call sequence
    for name, factory in predicates:
        config.add_route_predicate(name, factory)
@@ -1,461 +1,462 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.view import view_config
26 26
27 27 from rhodecode.apps._base import BaseAppView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib.auth import (
30 30 LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator,
31 31 CSRFRequired)
32 32 from rhodecode.lib.index import searcher_from_config
33 33 from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.model.db import (
36 func, or_, in_filter_generator, Repository, RepoGroup, User, UserGroup)
36 func, true, or_, in_filter_generator, Repository, RepoGroup, User, UserGroup)
37 37 from rhodecode.model.repo import RepoModel
38 38 from rhodecode.model.repo_group import RepoGroupModel
39 39 from rhodecode.model.scm import RepoGroupList, RepoList
40 40 from rhodecode.model.user import UserModel
41 41 from rhodecode.model.user_group import UserGroupModel
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class HomeView(BaseAppView):
47 47
48 48 def load_default_context(self):
49 49 c = self._get_local_tmpl_context()
50 50 c.user = c.auth_user.get_instance()
51 51
52 52 return c
53 53
54 54 @LoginRequired()
55 55 @view_config(
56 56 route_name='user_autocomplete_data', request_method='GET',
57 57 renderer='json_ext', xhr=True)
58 58 def user_autocomplete_data(self):
59 59 self.load_default_context()
60 60 query = self.request.GET.get('query')
61 61 active = str2bool(self.request.GET.get('active') or True)
62 62 include_groups = str2bool(self.request.GET.get('user_groups'))
63 63 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
64 64 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
65 65
66 66 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
67 67 query, active, include_groups)
68 68
69 69 _users = UserModel().get_users(
70 70 name_contains=query, only_active=active)
71 71
72 72 def maybe_skip_default_user(usr):
73 73 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
74 74 return False
75 75 return True
76 76 _users = filter(maybe_skip_default_user, _users)
77 77
78 78 if include_groups:
79 79 # extend with user groups
80 80 _user_groups = UserGroupModel().get_user_groups(
81 81 name_contains=query, only_active=active,
82 82 expand_groups=expand_groups)
83 83 _users = _users + _user_groups
84 84
85 85 return {'suggestions': _users}
86 86
87 87 @LoginRequired()
88 88 @NotAnonymous()
89 89 @view_config(
90 90 route_name='user_group_autocomplete_data', request_method='GET',
91 91 renderer='json_ext', xhr=True)
92 92 def user_group_autocomplete_data(self):
93 93 self.load_default_context()
94 94 query = self.request.GET.get('query')
95 95 active = str2bool(self.request.GET.get('active') or True)
96 96 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
97 97
98 98 log.debug('generating user group list, query:%s, active:%s',
99 99 query, active)
100 100
101 101 _user_groups = UserGroupModel().get_user_groups(
102 102 name_contains=query, only_active=active,
103 103 expand_groups=expand_groups)
104 104 _user_groups = _user_groups
105 105
106 106 return {'suggestions': _user_groups}
107 107
108 108 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
109 109 org_query = name_contains
110 110 allowed_ids = self._rhodecode_user.repo_acl_ids(
111 111 ['repository.read', 'repository.write', 'repository.admin'],
112 112 cache=False, name_filter=name_contains) or [-1]
113 113
114 114 query = Repository.query()\
115 115 .order_by(func.length(Repository.repo_name))\
116 116 .order_by(Repository.repo_name)\
117 .filter(Repository.archived.isnot(true()))\
117 118 .filter(or_(
118 119 # generate multiple IN to fix limitation problems
119 120 *in_filter_generator(Repository.repo_id, allowed_ids)
120 121 ))
121 122
122 123 if repo_type:
123 124 query = query.filter(Repository.repo_type == repo_type)
124 125
125 126 if name_contains:
126 127 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
127 128 query = query.filter(
128 129 Repository.repo_name.ilike(ilike_expression))
129 130 query = query.limit(limit)
130 131
131 132 acl_iter = query
132 133
133 134 return [
134 135 {
135 136 'id': obj.repo_name,
136 137 'value': org_query,
137 138 'value_display': obj.repo_name,
138 139 'text': obj.repo_name,
139 140 'type': 'repo',
140 141 'repo_id': obj.repo_id,
141 142 'repo_type': obj.repo_type,
142 143 'private': obj.private,
143 144 'url': h.route_path('repo_summary', repo_name=obj.repo_name)
144 145 }
145 146 for obj in acl_iter]
146 147
147 148 def _get_repo_group_list(self, name_contains=None, limit=20):
148 149 org_query = name_contains
149 150 allowed_ids = self._rhodecode_user.repo_group_acl_ids(
150 151 ['group.read', 'group.write', 'group.admin'],
151 152 cache=False, name_filter=name_contains) or [-1]
152 153
153 154 query = RepoGroup.query()\
154 155 .order_by(func.length(RepoGroup.group_name))\
155 156 .order_by(RepoGroup.group_name) \
156 157 .filter(or_(
157 158 # generate multiple IN to fix limitation problems
158 159 *in_filter_generator(RepoGroup.group_id, allowed_ids)
159 160 ))
160 161
161 162 if name_contains:
162 163 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
163 164 query = query.filter(
164 165 RepoGroup.group_name.ilike(ilike_expression))
165 166 query = query.limit(limit)
166 167
167 168 acl_iter = query
168 169
169 170 return [
170 171 {
171 172 'id': obj.group_name,
172 173 'value': org_query,
173 174 'value_display': obj.group_name,
174 175 'type': 'repo_group',
175 176 'url': h.route_path(
176 177 'repo_group_home', repo_group_name=obj.group_name)
177 178 }
178 179 for obj in acl_iter]
179 180
180 181 def _get_user_list(self, name_contains=None, limit=20):
181 182 org_query = name_contains
182 183 if not name_contains:
183 184 return []
184 185
185 186 name_contains = re.compile('(?:user:)(.+)').findall(name_contains)
186 187 if len(name_contains) != 1:
187 188 return []
188 189 name_contains = name_contains[0]
189 190
190 191 query = User.query()\
191 192 .order_by(func.length(User.username))\
192 193 .order_by(User.username) \
193 194 .filter(User.username != User.DEFAULT_USER)
194 195
195 196 if name_contains:
196 197 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
197 198 query = query.filter(
198 199 User.username.ilike(ilike_expression))
199 200 query = query.limit(limit)
200 201
201 202 acl_iter = query
202 203
203 204 return [
204 205 {
205 206 'id': obj.user_id,
206 207 'value': org_query,
207 208 'value_display': obj.username,
208 209 'type': 'user',
209 210 'icon_link': h.gravatar_url(obj.email, 30),
210 211 'url': h.route_path(
211 212 'user_profile', username=obj.username)
212 213 }
213 214 for obj in acl_iter]
214 215
215 216 def _get_user_groups_list(self, name_contains=None, limit=20):
216 217 org_query = name_contains
217 218 if not name_contains:
218 219 return []
219 220
220 221 name_contains = re.compile('(?:user_group:)(.+)').findall(name_contains)
221 222 if len(name_contains) != 1:
222 223 return []
223 224 name_contains = name_contains[0]
224 225
225 226 query = UserGroup.query()\
226 227 .order_by(func.length(UserGroup.users_group_name))\
227 228 .order_by(UserGroup.users_group_name)
228 229
229 230 if name_contains:
230 231 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
231 232 query = query.filter(
232 233 UserGroup.users_group_name.ilike(ilike_expression))
233 234 query = query.limit(limit)
234 235
235 236 acl_iter = query
236 237
237 238 return [
238 239 {
239 240 'id': obj.users_group_id,
240 241 'value': org_query,
241 242 'value_display': obj.users_group_name,
242 243 'type': 'user_group',
243 244 'url': h.route_path(
244 245 'user_group_profile', user_group_name=obj.users_group_name)
245 246 }
246 247 for obj in acl_iter]
247 248
248 249 def _get_hash_commit_list(self, auth_user, query):
249 250 org_query = query
250 251 if not query or len(query) < 3:
251 252 return []
252 253
253 254 commit_hashes = re.compile('(?:commit:)([0-9a-f]{2,40})').findall(query)
254 255
255 256 if len(commit_hashes) != 1:
256 257 return []
257 258 commit_hash = commit_hashes[0]
258 259
259 260 searcher = searcher_from_config(self.request.registry.settings)
260 261 result = searcher.search(
261 262 'commit_id:%s*' % commit_hash, 'commit', auth_user,
262 263 raise_on_exc=False)
263 264
264 265 return [
265 266 {
266 267 'id': entry['commit_id'],
267 268 'value': org_query,
268 269 'value_display': 'repo `{}` commit: {}'.format(
269 270 entry['repository'], entry['commit_id']),
270 271 'type': 'commit',
271 272 'repo': entry['repository'],
272 273 'url': h.route_path(
273 274 'repo_commit',
274 275 repo_name=entry['repository'], commit_id=entry['commit_id'])
275 276 }
276 277 for entry in result['results']]
277 278
278 279 @LoginRequired()
279 280 @view_config(
280 281 route_name='repo_list_data', request_method='GET',
281 282 renderer='json_ext', xhr=True)
282 283 def repo_list_data(self):
283 284 _ = self.request.translate
284 285 self.load_default_context()
285 286
286 287 query = self.request.GET.get('query')
287 288 repo_type = self.request.GET.get('repo_type')
288 289 log.debug('generating repo list, query:%s, repo_type:%s',
289 290 query, repo_type)
290 291
291 292 res = []
292 293 repos = self._get_repo_list(query, repo_type=repo_type)
293 294 if repos:
294 295 res.append({
295 296 'text': _('Repositories'),
296 297 'children': repos
297 298 })
298 299
299 300 data = {
300 301 'more': False,
301 302 'results': res
302 303 }
303 304 return data
304 305
305 306 @LoginRequired()
306 307 @view_config(
307 308 route_name='goto_switcher_data', request_method='GET',
308 309 renderer='json_ext', xhr=True)
309 310 def goto_switcher_data(self):
310 311 c = self.load_default_context()
311 312
312 313 _ = self.request.translate
313 314
314 315 query = self.request.GET.get('query')
315 316 log.debug('generating main filter data, query %s', query)
316 317
317 318 default_search_val = u'Full text search for: `{}`'.format(query)
318 319 res = []
319 320 if not query:
320 321 return {'suggestions': res}
321 322
322 323 res.append({
323 324 'id': -1,
324 325 'value': query,
325 326 'value_display': default_search_val,
326 327 'type': 'search',
327 328 'url': h.route_path(
328 329 'search', _query={'q': query})
329 330 })
330 331 repo_group_id = safe_int(self.request.GET.get('repo_group_id'))
331 332 if repo_group_id:
332 333 repo_group = RepoGroup.get(repo_group_id)
333 334 composed_hint = '{}/{}'.format(repo_group.group_name, query)
334 335 show_hint = not query.startswith(repo_group.group_name)
335 336 if repo_group and show_hint:
336 337 hint = u'Group search: `{}`'.format(composed_hint)
337 338 res.append({
338 339 'id': -1,
339 340 'value': composed_hint,
340 341 'value_display': hint,
341 342 'type': 'hint',
342 343 'url': ""
343 344 })
344 345
345 346 repo_groups = self._get_repo_group_list(query)
346 347 for serialized_repo_group in repo_groups:
347 348 res.append(serialized_repo_group)
348 349
349 350 repos = self._get_repo_list(query)
350 351 for serialized_repo in repos:
351 352 res.append(serialized_repo)
352 353
353 354 # TODO(marcink): permissions for that ?
354 355 allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
355 356 if allowed_user_search:
356 357 users = self._get_user_list(query)
357 358 for serialized_user in users:
358 359 res.append(serialized_user)
359 360
360 361 user_groups = self._get_user_groups_list(query)
361 362 for serialized_user_group in user_groups:
362 363 res.append(serialized_user_group)
363 364
364 365 commits = self._get_hash_commit_list(c.auth_user, query)
365 366 if commits:
366 367 unique_repos = collections.OrderedDict()
367 368 for commit in commits:
368 369 repo_name = commit['repo']
369 370 unique_repos.setdefault(repo_name, []).append(commit)
370 371
371 372 for repo, commits in unique_repos.items():
372 373 for commit in commits:
373 374 res.append(commit)
374 375
375 376 return {'suggestions': res}
376 377
377 378 def _get_groups_and_repos(self, repo_group_id=None):
378 379 # repo groups groups
379 380 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
380 381 _perms = ['group.read', 'group.write', 'group.admin']
381 382 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
382 383 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
383 384 repo_group_list=repo_group_list_acl, admin=False)
384 385
385 386 # repositories
386 387 repo_list = Repository.get_all_repos(group_id=repo_group_id)
387 388 _perms = ['repository.read', 'repository.write', 'repository.admin']
388 389 repo_list_acl = RepoList(repo_list, perm_set=_perms)
389 390 repo_data = RepoModel().get_repos_as_dict(
390 391 repo_list=repo_list_acl, admin=False)
391 392
392 393 return repo_data, repo_group_data
393 394
394 395 @LoginRequired()
395 396 @view_config(
396 397 route_name='home', request_method='GET',
397 398 renderer='rhodecode:templates/index.mako')
398 399 def main_page(self):
399 400 c = self.load_default_context()
400 401 c.repo_group = None
401 402
402 403 repo_data, repo_group_data = self._get_groups_and_repos()
403 404 # json used to render the grids
404 405 c.repos_data = json.dumps(repo_data)
405 406 c.repo_groups_data = json.dumps(repo_group_data)
406 407
407 408 return self._get_template_context(c)
408 409
409 410 @LoginRequired()
410 411 @HasRepoGroupPermissionAnyDecorator(
411 412 'group.read', 'group.write', 'group.admin')
412 413 @view_config(
413 414 route_name='repo_group_home', request_method='GET',
414 415 renderer='rhodecode:templates/index_repo_group.mako')
415 416 @view_config(
416 417 route_name='repo_group_home_slash', request_method='GET',
417 418 renderer='rhodecode:templates/index_repo_group.mako')
418 419 def repo_group_main_page(self):
419 420 c = self.load_default_context()
420 421 c.repo_group = self.request.db_repo_group
421 422 repo_data, repo_group_data = self._get_groups_and_repos(
422 423 c.repo_group.group_id)
423 424
424 425 # json used to render the grids
425 426 c.repos_data = json.dumps(repo_data)
426 427 c.repo_groups_data = json.dumps(repo_group_data)
427 428
428 429 return self._get_template_context(c)
429 430
430 431 @LoginRequired()
431 432 @CSRFRequired()
432 433 @view_config(
433 434 route_name='markup_preview', request_method='POST',
434 435 renderer='string', xhr=True)
435 436 def markup_preview(self):
436 437 # Technically a CSRF token is not needed as no state changes with this
437 438 # call. However, as this is a POST is better to have it, so automated
438 439 # tools don't flag it as potential CSRF.
439 440 # Post is required because the payload could be bigger than the maximum
440 441 # allowed by GET.
441 442
442 443 text = self.request.POST.get('text')
443 444 renderer = self.request.POST.get('renderer') or 'rst'
444 445 if text:
445 446 return h.render(text, renderer=renderer, mentions=True)
446 447 return ''
447 448
448 449 @LoginRequired()
449 450 @CSRFRequired()
450 451 @view_config(
451 452 route_name='store_user_session_value', request_method='POST',
452 453 renderer='string', xhr=True)
453 454 def store_user_session_attr(self):
454 455 key = self.request.POST.get('key')
455 456 val = self.request.POST.get('val')
456 457
457 458 existing_value = self.request.session.get(key)
458 459 if existing_value != val:
459 460 self.request.session[key] = val
460 461
461 462 return 'stored:{}'.format(key)
@@ -1,476 +1,483 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 from rhodecode.apps._base import add_route_with_slash
21 21
22 22
23 23 def includeme(config):
24 24
25 25 # repo creating checks, special cases that aren't repo routes
26 26 config.add_route(
27 27 name='repo_creating',
28 28 pattern='/{repo_name:.*?[^/]}/repo_creating')
29 29
30 30 config.add_route(
31 31 name='repo_creating_check',
32 32 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
33 33
34 34 # Summary
35 35 # NOTE(marcink): one additional route is defined in very bottom, catch
36 36 # all pattern
37 37 config.add_route(
38 38 name='repo_summary_explicit',
39 39 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
40 40 config.add_route(
41 41 name='repo_summary_commits',
42 42 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
43 43
44 44 # Commits
45 45 config.add_route(
46 46 name='repo_commit',
47 47 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
48 48
49 49 config.add_route(
50 50 name='repo_commit_children',
51 51 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
52 52
53 53 config.add_route(
54 54 name='repo_commit_parents',
55 55 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
56 56
57 57 config.add_route(
58 58 name='repo_commit_raw',
59 59 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
60 60
61 61 config.add_route(
62 62 name='repo_commit_patch',
63 63 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
64 64
65 65 config.add_route(
66 66 name='repo_commit_download',
67 67 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
68 68
69 69 config.add_route(
70 70 name='repo_commit_data',
71 71 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
72 72
73 73 config.add_route(
74 74 name='repo_commit_comment_create',
75 75 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
76 76
77 77 config.add_route(
78 78 name='repo_commit_comment_preview',
79 79 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
80 80
81 81 config.add_route(
82 82 name='repo_commit_comment_delete',
83 83 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
84 84
85 85 # still working url for backward compat.
86 86 config.add_route(
87 87 name='repo_commit_raw_deprecated',
88 88 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
89 89
90 90 # Files
91 91 config.add_route(
92 92 name='repo_archivefile',
93 93 pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True)
94 94
95 95 config.add_route(
96 96 name='repo_files_diff',
97 97 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
98 98 config.add_route( # legacy route to make old links work
99 99 name='repo_files_diff_2way_redirect',
100 100 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
101 101
102 102 config.add_route(
103 103 name='repo_files',
104 104 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
105 105 config.add_route(
106 106 name='repo_files:default_path',
107 107 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
108 108 config.add_route(
109 109 name='repo_files:default_commit',
110 110 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
111 111
112 112 config.add_route(
113 113 name='repo_files:rendered',
114 114 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
115 115
116 116 config.add_route(
117 117 name='repo_files:annotated',
118 118 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
119 119 config.add_route(
120 120 name='repo_files:annotated_previous',
121 121 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
122 122
123 123 config.add_route(
124 124 name='repo_nodetree_full',
125 125 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
126 126 config.add_route(
127 127 name='repo_nodetree_full:default_path',
128 128 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
129 129
130 130 config.add_route(
131 131 name='repo_files_nodelist',
132 132 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
133 133
134 134 config.add_route(
135 135 name='repo_file_raw',
136 136 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
137 137
138 138 config.add_route(
139 139 name='repo_file_download',
140 140 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
141 141 config.add_route( # backward compat to keep old links working
142 142 name='repo_file_download:legacy',
143 143 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
144 144 repo_route=True)
145 145
146 146 config.add_route(
147 147 name='repo_file_history',
148 148 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
149 149
150 150 config.add_route(
151 151 name='repo_file_authors',
152 152 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
153 153
154 154 config.add_route(
155 155 name='repo_files_remove_file',
156 156 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
157 157 repo_route=True)
158 158 config.add_route(
159 159 name='repo_files_delete_file',
160 160 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
161 161 repo_route=True)
162 162 config.add_route(
163 163 name='repo_files_edit_file',
164 164 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
165 165 repo_route=True)
166 166 config.add_route(
167 167 name='repo_files_update_file',
168 168 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
169 169 repo_route=True)
170 170 config.add_route(
171 171 name='repo_files_add_file',
172 172 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
173 173 repo_route=True)
174 174 config.add_route(
175 175 name='repo_files_create_file',
176 176 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
177 177 repo_route=True)
178 178
179 179 # Refs data
180 180 config.add_route(
181 181 name='repo_refs_data',
182 182 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
183 183
184 184 config.add_route(
185 185 name='repo_refs_changelog_data',
186 186 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
187 187
188 188 config.add_route(
189 189 name='repo_stats',
190 190 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
191 191
192 192 # Changelog
193 193 config.add_route(
194 194 name='repo_changelog',
195 195 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
196 196 config.add_route(
197 197 name='repo_changelog_file',
198 198 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
199 199 config.add_route(
200 200 name='repo_changelog_elements',
201 201 pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True)
202 202 config.add_route(
203 203 name='repo_changelog_elements_file',
204 204 pattern='/{repo_name:.*?[^/]}/changelog_elements/{commit_id}/{f_path:.*}', repo_route=True)
205 205
206 206 # Compare
207 207 config.add_route(
208 208 name='repo_compare_select',
209 209 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
210 210
211 211 config.add_route(
212 212 name='repo_compare',
213 213 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
214 214
215 215 # Tags
216 216 config.add_route(
217 217 name='tags_home',
218 218 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
219 219
220 220 # Branches
221 221 config.add_route(
222 222 name='branches_home',
223 223 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
224 224
225 225 # Bookmarks
226 226 config.add_route(
227 227 name='bookmarks_home',
228 228 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
229 229
230 230 # Forks
231 231 config.add_route(
232 232 name='repo_fork_new',
233 233 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
234 repo_forbid_when_archived=True,
234 235 repo_accepted_types=['hg', 'git'])
235 236
236 237 config.add_route(
237 238 name='repo_fork_create',
238 239 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
240 repo_forbid_when_archived=True,
239 241 repo_accepted_types=['hg', 'git'])
240 242
241 243 config.add_route(
242 244 name='repo_forks_show_all',
243 245 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
244 246 repo_accepted_types=['hg', 'git'])
245 247 config.add_route(
246 248 name='repo_forks_data',
247 249 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
248 250 repo_accepted_types=['hg', 'git'])
249 251
250 252 # Pull Requests
251 253 config.add_route(
252 254 name='pullrequest_show',
253 255 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
254 256 repo_route=True)
255 257
256 258 config.add_route(
257 259 name='pullrequest_show_all',
258 260 pattern='/{repo_name:.*?[^/]}/pull-request',
259 261 repo_route=True, repo_accepted_types=['hg', 'git'])
260 262
261 263 config.add_route(
262 264 name='pullrequest_show_all_data',
263 265 pattern='/{repo_name:.*?[^/]}/pull-request-data',
264 266 repo_route=True, repo_accepted_types=['hg', 'git'])
265 267
266 268 config.add_route(
267 269 name='pullrequest_repo_refs',
268 270 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
269 271 repo_route=True)
270 272
271 273 config.add_route(
272 274 name='pullrequest_repo_destinations',
273 275 pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations',
274 276 repo_route=True)
275 277
276 278 config.add_route(
277 279 name='pullrequest_new',
278 280 pattern='/{repo_name:.*?[^/]}/pull-request/new',
279 repo_route=True, repo_accepted_types=['hg', 'git'])
281 repo_route=True, repo_accepted_types=['hg', 'git'],
282 repo_forbid_when_archived=True)
280 283
281 284 config.add_route(
282 285 name='pullrequest_create',
283 286 pattern='/{repo_name:.*?[^/]}/pull-request/create',
284 repo_route=True, repo_accepted_types=['hg', 'git'])
287 repo_route=True, repo_accepted_types=['hg', 'git'],
288 repo_forbid_when_archived=True)
285 289
286 290 config.add_route(
287 291 name='pullrequest_update',
288 292 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
289 repo_route=True)
293 repo_route=True, repo_forbid_when_archived=True)
290 294
291 295 config.add_route(
292 296 name='pullrequest_merge',
293 297 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
294 repo_route=True)
298 repo_route=True, repo_forbid_when_archived=True)
295 299
296 300 config.add_route(
297 301 name='pullrequest_delete',
298 302 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
299 repo_route=True)
303 repo_route=True, repo_forbid_when_archived=True)
300 304
301 305 config.add_route(
302 306 name='pullrequest_comment_create',
303 307 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
304 308 repo_route=True)
305 309
306 310 config.add_route(
307 311 name='pullrequest_comment_delete',
308 312 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
309 313 repo_route=True, repo_accepted_types=['hg', 'git'])
310 314
311 315 # Settings
312 316 config.add_route(
313 317 name='edit_repo',
314 318 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
315 319 # update is POST on edit_repo
316 320
317 321 # Settings advanced
318 322 config.add_route(
319 323 name='edit_repo_advanced',
320 324 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
321 325 config.add_route(
326 name='edit_repo_advanced_archive',
327 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
328 config.add_route(
322 329 name='edit_repo_advanced_delete',
323 330 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
324 331 config.add_route(
325 332 name='edit_repo_advanced_locking',
326 333 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
327 334 config.add_route(
328 335 name='edit_repo_advanced_journal',
329 336 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
330 337 config.add_route(
331 338 name='edit_repo_advanced_fork',
332 339 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
333 340
334 341 config.add_route(
335 342 name='edit_repo_advanced_hooks',
336 343 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
337 344
338 345 # Caches
339 346 config.add_route(
340 347 name='edit_repo_caches',
341 348 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
342 349
343 350 # Permissions
344 351 config.add_route(
345 352 name='edit_repo_perms',
346 353 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
347 354
348 355 # Permissions Branch (EE feature)
349 356 config.add_route(
350 357 name='edit_repo_perms_branch',
351 358 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
352 359 config.add_route(
353 360 name='edit_repo_perms_branch_delete',
354 361 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
355 362 repo_route=True)
356 363
357 364 # Maintenance
358 365 config.add_route(
359 366 name='edit_repo_maintenance',
360 367 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
361 368
362 369 config.add_route(
363 370 name='edit_repo_maintenance_execute',
364 371 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
365 372
366 373 # Fields
367 374 config.add_route(
368 375 name='edit_repo_fields',
369 376 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
370 377 config.add_route(
371 378 name='edit_repo_fields_create',
372 379 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
373 380 config.add_route(
374 381 name='edit_repo_fields_delete',
375 382 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
376 383
377 384 # Locking
378 385 config.add_route(
379 386 name='repo_edit_toggle_locking',
380 387 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
381 388
382 389 # Remote
383 390 config.add_route(
384 391 name='edit_repo_remote',
385 392 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
386 393 config.add_route(
387 394 name='edit_repo_remote_pull',
388 395 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
389 396 config.add_route(
390 397 name='edit_repo_remote_push',
391 398 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
392 399
393 400 # Statistics
394 401 config.add_route(
395 402 name='edit_repo_statistics',
396 403 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
397 404 config.add_route(
398 405 name='edit_repo_statistics_reset',
399 406 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
400 407
401 408 # Issue trackers
402 409 config.add_route(
403 410 name='edit_repo_issuetracker',
404 411 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
405 412 config.add_route(
406 413 name='edit_repo_issuetracker_test',
407 414 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
408 415 config.add_route(
409 416 name='edit_repo_issuetracker_delete',
410 417 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
411 418 config.add_route(
412 419 name='edit_repo_issuetracker_update',
413 420 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
414 421
415 422 # VCS Settings
416 423 config.add_route(
417 424 name='edit_repo_vcs',
418 425 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
419 426 config.add_route(
420 427 name='edit_repo_vcs_update',
421 428 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
422 429
423 430 # svn pattern
424 431 config.add_route(
425 432 name='edit_repo_vcs_svn_pattern_delete',
426 433 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
427 434
428 435 # Repo Review Rules (EE feature)
429 436 config.add_route(
430 437 name='repo_reviewers',
431 438 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
432 439
433 440 config.add_route(
434 441 name='repo_default_reviewers_data',
435 442 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
436 443
437 444 # Repo Automation (EE feature)
438 445 config.add_route(
439 446 name='repo_automation',
440 447 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
441 448
442 449 # Strip
443 450 config.add_route(
444 451 name='edit_repo_strip',
445 452 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
446 453
447 454 config.add_route(
448 455 name='strip_check',
449 456 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
450 457
451 458 config.add_route(
452 459 name='strip_execute',
453 460 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
454 461
455 462 # Audit logs
456 463 config.add_route(
457 464 name='edit_repo_audit_logs',
458 465 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
459 466
460 467 # ATOM/RSS Feed
461 468 config.add_route(
462 469 name='rss_feed_home',
463 470 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
464 471
465 472 config.add_route(
466 473 name='atom_feed_home',
467 474 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
468 475
469 476 # NOTE(marcink): needs to be at the end for catch-all
470 477 add_route_with_slash(
471 478 config,
472 479 name='repo_summary',
473 480 pattern='/{repo_name:.*?[^/]}', repo_route=True)
474 481
475 482 # Scan module for configuration decorators.
476 483 config.scan('.views', ignore='.tests')
@@ -1,315 +1,336 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK
24 24
25 25 from rhodecode.tests.fixture import Fixture
26 26 from rhodecode.lib import helpers as h
27 27
28 28 from rhodecode.model.db import Repository
29 29 from rhodecode.model.repo import RepoModel
30 30 from rhodecode.model.user import UserModel
31 31 from rhodecode.model.meta import Session
32 32
33 33 fixture = Fixture()
34 34
35 35
36 36 def route_path(name, params=None, **kwargs):
37 37 import urllib
38 38
39 39 base_url = {
40 40 'repo_summary': '/{repo_name}',
41 41 'repo_creating_check': '/{repo_name}/repo_creating_check',
42 42 'repo_fork_new': '/{repo_name}/fork',
43 43 'repo_fork_create': '/{repo_name}/fork/create',
44 44 'repo_forks_show_all': '/{repo_name}/forks',
45 45 'repo_forks_data': '/{repo_name}/forks/data',
46 46 }[name].format(**kwargs)
47 47
48 48 if params:
49 49 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
50 50 return base_url
51 51
52 52
53 53 FORK_NAME = {
54 54 'hg': HG_FORK,
55 55 'git': GIT_FORK
56 56 }
57 57
58 58
59 59 @pytest.mark.skip_backends('svn')
60 60 class TestRepoForkViewTests(TestController):
61 61
62 62 def test_show_forks(self, backend, xhr_header):
63 63 self.log_user()
64 64 response = self.app.get(
65 65 route_path('repo_forks_data', repo_name=backend.repo_name),
66 66 extra_environ=xhr_header)
67 67
68 68 assert response.json == {u'data': [], u'draw': None,
69 69 u'recordsFiltered': 0, u'recordsTotal': 0}
70 70
71 71 def test_no_permissions_to_fork_page(self, backend, user_util):
72 72 user = user_util.create_user(password='qweqwe')
73 73 user_id = user.user_id
74 74 self.log_user(user.username, 'qweqwe')
75 75
76 76 user_model = UserModel()
77 77 user_model.revoke_perm(user_id, 'hg.fork.repository')
78 78 user_model.grant_perm(user_id, 'hg.fork.none')
79 79 u = UserModel().get(user_id)
80 80 u.inherit_default_permissions = False
81 81 Session().commit()
82 82 # try create a fork
83 83 self.app.get(
84 84 route_path('repo_fork_new', repo_name=backend.repo_name),
85 85 status=404)
86 86
87 87 def test_no_permissions_to_fork_submit(self, backend, csrf_token, user_util):
88 88 user = user_util.create_user(password='qweqwe')
89 89 user_id = user.user_id
90 90 self.log_user(user.username, 'qweqwe')
91 91
92 92 user_model = UserModel()
93 93 user_model.revoke_perm(user_id, 'hg.fork.repository')
94 94 user_model.grant_perm(user_id, 'hg.fork.none')
95 95 u = UserModel().get(user_id)
96 96 u.inherit_default_permissions = False
97 97 Session().commit()
98 98 # try create a fork
99 99 self.app.post(
100 100 route_path('repo_fork_create', repo_name=backend.repo_name),
101 101 {'csrf_token': csrf_token},
102 102 status=404)
103 103
104 104 def test_fork_missing_data(self, autologin_user, backend, csrf_token):
105 105 # try create a fork
106 106 response = self.app.post(
107 107 route_path('repo_fork_create', repo_name=backend.repo_name),
108 108 {'csrf_token': csrf_token},
109 109 status=200)
110 110 # test if html fill works fine
111 111 response.mustcontain('Missing value')
112 112
113 113 def test_create_fork_page(self, autologin_user, backend):
114 114 self.app.get(
115 115 route_path('repo_fork_new', repo_name=backend.repo_name),
116 116 status=200)
117 117
118 118 def test_create_and_show_fork(
119 119 self, autologin_user, backend, csrf_token, xhr_header):
120 120
121 121 # create a fork
122 122 fork_name = FORK_NAME[backend.alias]
123 123 description = 'fork of vcs test'
124 124 repo_name = backend.repo_name
125 125 source_repo = Repository.get_by_repo_name(repo_name)
126 126 creation_args = {
127 127 'repo_name': fork_name,
128 128 'repo_group': '',
129 129 'fork_parent_id': source_repo.repo_id,
130 130 'repo_type': backend.alias,
131 131 'description': description,
132 132 'private': 'False',
133 133 'landing_rev': 'rev:tip',
134 134 'csrf_token': csrf_token,
135 135 }
136 136
137 137 self.app.post(
138 138 route_path('repo_fork_create', repo_name=repo_name), creation_args)
139 139
140 140 response = self.app.get(
141 141 route_path('repo_forks_data', repo_name=repo_name),
142 142 extra_environ=xhr_header)
143 143
144 144 assert response.json['data'][0]['fork_name'] == \
145 145 """<a href="/%s">%s</a>""" % (fork_name, fork_name)
146 146
147 147 # remove this fork
148 148 fixture.destroy_repo(fork_name)
149 149
150 150 def test_fork_create(self, autologin_user, backend, csrf_token):
151 151 fork_name = FORK_NAME[backend.alias]
152 152 description = 'fork of vcs test'
153 153 repo_name = backend.repo_name
154 154 source_repo = Repository.get_by_repo_name(repo_name)
155 155 creation_args = {
156 156 'repo_name': fork_name,
157 157 'repo_group': '',
158 158 'fork_parent_id': source_repo.repo_id,
159 159 'repo_type': backend.alias,
160 160 'description': description,
161 161 'private': 'False',
162 162 'landing_rev': 'rev:tip',
163 163 'csrf_token': csrf_token,
164 164 }
165 165 self.app.post(
166 166 route_path('repo_fork_create', repo_name=repo_name), creation_args)
167 167 repo = Repository.get_by_repo_name(FORK_NAME[backend.alias])
168 168 assert repo.fork.repo_name == backend.repo_name
169 169
170 170 # run the check page that triggers the flash message
171 171 response = self.app.get(
172 172 route_path('repo_creating_check', repo_name=fork_name))
173 173 # test if we have a message that fork is ok
174 174 assert_session_flash(response,
175 175 'Forked repository %s as <a href="/%s">%s</a>'
176 176 % (repo_name, fork_name, fork_name))
177 177
178 178 # test if the fork was created in the database
179 179 fork_repo = Session().query(Repository)\
180 180 .filter(Repository.repo_name == fork_name).one()
181 181
182 182 assert fork_repo.repo_name == fork_name
183 183 assert fork_repo.fork.repo_name == repo_name
184 184
185 185 # test if the repository is visible in the list ?
186 186 response = self.app.get(
187 187 h.route_path('repo_summary', repo_name=fork_name))
188 188 response.mustcontain(fork_name)
189 189 response.mustcontain(backend.alias)
190 190 response.mustcontain('Fork of')
191 191 response.mustcontain('<a href="/%s">%s</a>' % (repo_name, repo_name))
192 192
193 193 def test_fork_create_into_group(self, autologin_user, backend, csrf_token):
194 194 group = fixture.create_repo_group('vc')
195 195 group_id = group.group_id
196 196 fork_name = FORK_NAME[backend.alias]
197 197 fork_name_full = 'vc/%s' % fork_name
198 198 description = 'fork of vcs test'
199 199 repo_name = backend.repo_name
200 200 source_repo = Repository.get_by_repo_name(repo_name)
201 201 creation_args = {
202 202 'repo_name': fork_name,
203 203 'repo_group': group_id,
204 204 'fork_parent_id': source_repo.repo_id,
205 205 'repo_type': backend.alias,
206 206 'description': description,
207 207 'private': 'False',
208 208 'landing_rev': 'rev:tip',
209 209 'csrf_token': csrf_token,
210 210 }
211 211 self.app.post(
212 212 route_path('repo_fork_create', repo_name=repo_name), creation_args)
213 213 repo = Repository.get_by_repo_name(fork_name_full)
214 214 assert repo.fork.repo_name == backend.repo_name
215 215
216 216 # run the check page that triggers the flash message
217 217 response = self.app.get(
218 218 route_path('repo_creating_check', repo_name=fork_name_full))
219 219 # test if we have a message that fork is ok
220 220 assert_session_flash(response,
221 221 'Forked repository %s as <a href="/%s">%s</a>'
222 222 % (repo_name, fork_name_full, fork_name_full))
223 223
224 224 # test if the fork was created in the database
225 225 fork_repo = Session().query(Repository)\
226 226 .filter(Repository.repo_name == fork_name_full).one()
227 227
228 228 assert fork_repo.repo_name == fork_name_full
229 229 assert fork_repo.fork.repo_name == repo_name
230 230
231 231 # test if the repository is visible in the list ?
232 232 response = self.app.get(
233 233 h.route_path('repo_summary', repo_name=fork_name_full))
234 234 response.mustcontain(fork_name_full)
235 235 response.mustcontain(backend.alias)
236 236
237 237 response.mustcontain('Fork of')
238 238 response.mustcontain('<a href="/%s">%s</a>' % (repo_name, repo_name))
239 239
240 240 fixture.destroy_repo(fork_name_full)
241 241 fixture.destroy_repo_group(group_id)
242 242
243 243 def test_fork_read_permission(self, backend, xhr_header, user_util):
244 244 user = user_util.create_user(password='qweqwe')
245 245 user_id = user.user_id
246 246 self.log_user(user.username, 'qweqwe')
247 247
248 248 # create a fake fork
249 249 fork = user_util.create_repo(repo_type=backend.alias)
250 250 source = user_util.create_repo(repo_type=backend.alias)
251 251 repo_name = source.repo_name
252 252
253 253 fork.fork_id = source.repo_id
254 254 fork_name = fork.repo_name
255 255 Session().commit()
256 256
257 257 forks = Repository.query()\
258 258 .filter(Repository.repo_type == backend.alias)\
259 259 .filter(Repository.fork_id == source.repo_id).all()
260 260 assert 1 == len(forks)
261 261
262 262 # set read permissions for this
263 263 RepoModel().grant_user_permission(
264 264 repo=forks[0], user=user_id, perm='repository.read')
265 265 Session().commit()
266 266
267 267 response = self.app.get(
268 268 route_path('repo_forks_data', repo_name=repo_name),
269 269 extra_environ=xhr_header)
270 270
271 271 assert response.json['data'][0]['fork_name'] == \
272 272 """<a href="/%s">%s</a>""" % (fork_name, fork_name)
273 273
274 274 def test_fork_none_permission(self, backend, xhr_header, user_util):
275 275 user = user_util.create_user(password='qweqwe')
276 276 user_id = user.user_id
277 277 self.log_user(user.username, 'qweqwe')
278 278
279 279 # create a fake fork
280 280 fork = user_util.create_repo(repo_type=backend.alias)
281 281 source = user_util.create_repo(repo_type=backend.alias)
282 282 repo_name = source.repo_name
283 283
284 284 fork.fork_id = source.repo_id
285 285
286 286 Session().commit()
287 287
288 288 forks = Repository.query()\
289 289 .filter(Repository.repo_type == backend.alias)\
290 290 .filter(Repository.fork_id == source.repo_id).all()
291 291 assert 1 == len(forks)
292 292
293 293 # set none
294 294 RepoModel().grant_user_permission(
295 295 repo=forks[0], user=user_id, perm='repository.none')
296 296 Session().commit()
297 297
298 298 # fork shouldn't be there
299 299 response = self.app.get(
300 300 route_path('repo_forks_data', repo_name=repo_name),
301 301 extra_environ=xhr_header)
302 302
303 303 assert response.json == {u'data': [], u'draw': None,
304 304 u'recordsFiltered': 0, u'recordsTotal': 0}
305 305
306 @pytest.mark.parametrize('url_type', [
307 'repo_fork_new',
308 'repo_fork_create'
309 ])
310 def test_fork_is_forbidden_on_archived_repo(self, backend, xhr_header, user_util, url_type):
311 user = user_util.create_user(password='qweqwe')
312 self.log_user(user.username, 'qweqwe')
313
314 # create a temporary repo
315 source = user_util.create_repo(repo_type=backend.alias)
316 repo_name = source.repo_name
317 repo = Repository.get_by_repo_name(repo_name)
318 repo.archived = True
319 Session().commit()
320
321 response = self.app.get(
322 route_path(url_type, repo_name=repo_name), status=302)
323
324 msg = 'Action not supported for archived repository.'
325 assert_session_flash(response, msg)
326
306 327
307 328 class TestSVNFork(TestController):
308 329 @pytest.mark.parametrize('route_name', [
309 330 'repo_fork_create', 'repo_fork_new'
310 331 ])
311 332 def test_fork_redirects(self, autologin_user, backend_svn, route_name):
312 333
313 334 self.app.get(route_path(
314 335 route_name, repo_name=backend_svn.repo_name),
315 336 status=404)
@@ -1,1206 +1,1228 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for regular user without a merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now regular user has a merge permissions, we have CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_title', 'Title'),
372 372 ('pullrequest_desc', 'Description'),
373 373 ('description_renderer', 'markdown'),
374 374 ('__start__', 'review_members:sequence'),
375 375 ('__start__', 'reviewer:mapping'),
376 376 ('user_id', '1'),
377 377 ('__start__', 'reasons:sequence'),
378 378 ('reason', 'Some reason'),
379 379 ('__end__', 'reasons:sequence'),
380 380 ('__start__', 'rules:sequence'),
381 381 ('__end__', 'rules:sequence'),
382 382 ('mandatory', 'False'),
383 383 ('__end__', 'reviewer:mapping'),
384 384 ('__end__', 'review_members:sequence'),
385 385 ('__start__', 'revisions:sequence'),
386 386 ('revisions', commit_ids['change']),
387 387 ('revisions', commit_ids['change2']),
388 388 ('__end__', 'revisions:sequence'),
389 389 ('user', ''),
390 390 ('csrf_token', csrf_token),
391 391 ],
392 392 status=302)
393 393
394 394 location = response.headers['Location']
395 395 pull_request_id = location.rsplit('/', 1)[1]
396 396 assert pull_request_id != 'new'
397 397 pull_request = PullRequest.get(int(pull_request_id))
398 398
399 399 # check that we have now both revisions
400 400 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
401 401 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
402 402 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
403 403 assert pull_request.target_ref == expected_target_ref
404 404
405 405 def test_reviewer_notifications(self, backend, csrf_token):
406 406 # We have to use the app.post for this test so it will create the
407 407 # notifications properly with the new PR
408 408 commits = [
409 409 {'message': 'ancestor',
410 410 'added': [FileNode('file_A', content='content_of_ancestor')]},
411 411 {'message': 'change',
412 412 'added': [FileNode('file_a', content='content_of_change')]},
413 413 {'message': 'change-child'},
414 414 {'message': 'ancestor-child', 'parents': ['ancestor'],
415 415 'added': [
416 416 FileNode('file_B', content='content_of_ancestor_child')]},
417 417 {'message': 'ancestor-child-2'},
418 418 ]
419 419 commit_ids = backend.create_master_repo(commits)
420 420 target = backend.create_repo(heads=['ancestor-child'])
421 421 source = backend.create_repo(heads=['change'])
422 422
423 423 response = self.app.post(
424 424 route_path('pullrequest_create', repo_name=source.repo_name),
425 425 [
426 426 ('source_repo', source.repo_name),
427 427 ('source_ref', 'branch:default:' + commit_ids['change']),
428 428 ('target_repo', target.repo_name),
429 429 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
430 430 ('common_ancestor', commit_ids['ancestor']),
431 431 ('pullrequest_title', 'Title'),
432 432 ('pullrequest_desc', 'Description'),
433 433 ('description_renderer', 'markdown'),
434 434 ('__start__', 'review_members:sequence'),
435 435 ('__start__', 'reviewer:mapping'),
436 436 ('user_id', '2'),
437 437 ('__start__', 'reasons:sequence'),
438 438 ('reason', 'Some reason'),
439 439 ('__end__', 'reasons:sequence'),
440 440 ('__start__', 'rules:sequence'),
441 441 ('__end__', 'rules:sequence'),
442 442 ('mandatory', 'False'),
443 443 ('__end__', 'reviewer:mapping'),
444 444 ('__end__', 'review_members:sequence'),
445 445 ('__start__', 'revisions:sequence'),
446 446 ('revisions', commit_ids['change']),
447 447 ('__end__', 'revisions:sequence'),
448 448 ('user', ''),
449 449 ('csrf_token', csrf_token),
450 450 ],
451 451 status=302)
452 452
453 453 location = response.headers['Location']
454 454
455 455 pull_request_id = location.rsplit('/', 1)[1]
456 456 assert pull_request_id != 'new'
457 457 pull_request = PullRequest.get(int(pull_request_id))
458 458
459 459 # Check that a notification was made
460 460 notifications = Notification.query()\
461 461 .filter(Notification.created_by == pull_request.author.user_id,
462 462 Notification.type_ == Notification.TYPE_PULL_REQUEST,
463 463 Notification.subject.contains(
464 464 "wants you to review pull request #%s" % pull_request_id))
465 465 assert len(notifications.all()) == 1
466 466
467 467 # Change reviewers and check that a notification was made
468 468 PullRequestModel().update_reviewers(
469 469 pull_request.pull_request_id, [(1, [], False, [])],
470 470 pull_request.author)
471 471 assert len(notifications.all()) == 2
472 472
    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        """Creating a PR stores the common ancestor as the target_ref.

        The target branch contains commits the source does not; the PR must
        pin target_ref at the shared ancestor so the rendered diff contains
        only the source-side change.
        """
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # The ('__start__'/'__end__') markers mimic the deform sequence
        # encoding used by the real HTML form.
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        # redirect target ends with the numeric PR id ('new' would mean
        # the creation form was re-rendered instead)
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        assert 'content_of_ancestor' not in response.body
        assert 'content_of_ancestor-child' not in response.body
        assert 'content_of_change' in response.body
536 536
537 537 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
538 538 # Clear any previous calls to rcextensions
539 539 rhodecode.EXTENSIONS.calls.clear()
540 540
541 541 pull_request = pr_util.create_pull_request(
542 542 approved=True, mergeable=True)
543 543 pull_request_id = pull_request.pull_request_id
544 544 repo_name = pull_request.target_repo.scm_instance().name,
545 545
546 546 response = self.app.post(
547 547 route_path('pullrequest_merge',
548 548 repo_name=str(repo_name[0]),
549 549 pull_request_id=pull_request_id),
550 550 params={'csrf_token': csrf_token}).follow()
551 551
552 552 pull_request = PullRequest.get(pull_request_id)
553 553
554 554 assert response.status_int == 200
555 555 assert pull_request.is_closed()
556 556 assert_pull_request_status(
557 557 pull_request, ChangesetStatus.STATUS_APPROVED)
558 558
559 559 # Check the relevant log entries were added
560 560 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
561 561 actions = [log.action for log in user_logs]
562 562 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
563 563 expected_actions = [
564 564 u'repo.pull_request.close',
565 565 u'repo.pull_request.merge',
566 566 u'repo.pull_request.comment.create'
567 567 ]
568 568 assert actions == expected_actions
569 569
570 570 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
571 571 actions = [log for log in user_logs]
572 572 assert actions[-1].action == 'user.push'
573 573 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
574 574
575 575 # Check post_push rcextension was really executed
576 576 push_calls = rhodecode.EXTENSIONS.calls['post_push']
577 577 assert len(push_calls) == 1
578 578 unused_last_call_args, last_call_kwargs = push_calls[0]
579 579 assert last_call_kwargs['action'] == 'push'
580 580 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
581 581
582 582 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
583 583 pull_request = pr_util.create_pull_request(mergeable=False)
584 584 pull_request_id = pull_request.pull_request_id
585 585 pull_request = PullRequest.get(pull_request_id)
586 586
587 587 response = self.app.post(
588 588 route_path('pullrequest_merge',
589 589 repo_name=pull_request.target_repo.scm_instance().name,
590 590 pull_request_id=pull_request.pull_request_id),
591 591 params={'csrf_token': csrf_token}).follow()
592 592
593 593 assert response.status_int == 200
594 594 response.mustcontain(
595 595 'Merge is not currently possible because of below failed checks.')
596 596 response.mustcontain('Server-side pull request merging is disabled.')
597 597
598 598 @pytest.mark.skip_backends('svn')
599 599 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
600 600 pull_request = pr_util.create_pull_request(mergeable=True)
601 601 pull_request_id = pull_request.pull_request_id
602 602 repo_name = pull_request.target_repo.scm_instance().name
603 603
604 604 response = self.app.post(
605 605 route_path('pullrequest_merge',
606 606 repo_name=repo_name,
607 607 pull_request_id=pull_request_id),
608 608 params={'csrf_token': csrf_token}).follow()
609 609
610 610 assert response.status_int == 200
611 611
612 612 response.mustcontain(
613 613 'Merge is not currently possible because of below failed checks.')
614 614 response.mustcontain('Pull request reviewer approval is pending.')
615 615
    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        """A failed server-side merge flashes the matching failure message."""
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        # Patch the model so the merge is reported possible but the push
        # fails; the view must then flash the PUSH_FAILED status message.
        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=MergeResponse(
                True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
            merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

        assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
            MergeFailureReason.PUSH_FAILED])
637 637
    def test_update_source_revision(self, backend, csrf_token):
        """Updating a PR after a new source push appends the new revision."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target

        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token})

        # check that we have now both revisions (newest first)
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [
            commit_ids['change-2'], commit_ids['change']]

        # TODO: johbo: this should be a test on its own
        response = self.app.get(route_path(
            'pullrequest_new',
            repo_name=target.repo_name))
        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 0 removed commits.' in response.body
691 691
    def test_update_target_revision(self, backend, csrf_token):
        """Updating a PR after a target rebase moves target_ref forward and
        replaces the revision list with the rebased commit."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target
        # TODO: johbo: Target ref should be branch based, since tip can jump
        # from branch to branch
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token},
            status=200)

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor-new'])

        # TODO: johbo: This should be a test on its own
        response = self.app.get(route_path(
            'pullrequest_new',
            repo_name=target.repo_name))
        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 1 removed commits.' in response.body
752 752
    def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
        """A force-push that drops target commits still updates cleanly
        (reported as '0 added, 0 removed')."""
        backend = backend_git
        commits = [
            {'message': 'master-commit-1'},
            {'message': 'master-commit-2-change-1'},
            {'message': 'master-commit-3-change-2'},

            {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
            {'message': 'feat-commit-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['master-commit-3-change-2'])
        source = backend.create_repo(heads=['feat-commit-2'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        # NOTE(review): source_ref points at a master commit and target_ref
        # at a feat commit — looks swapped relative to the repos; confirm
        # whether that is intentional for this scenario.
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['master-commit-3-change-2'])

        pull_request.target_repo = target
        # TODO: johbo: Target ref should be branch based, since tip can jump
        # from branch to branch
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['feat-commit-2'])

        pull_request.revisions = [
            commit_ids['feat-commit-1'],
            commit_ids['feat-commit-2']
        ]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # PR is created, now we simulate a force-push into target,
        # that drops a 2 last commits
        vcsrepo = target.scm_instance()
        vcsrepo.config.clear_section('hooks')
        vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token},
            status=200)

        response = self.app.get(route_path(
            'pullrequest_new',
            repo_name=target.repo_name))
        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 0 added, 0 removed commits.')
815 815
    def test_update_of_ancestor_reference(self, backend, csrf_token):
        """After both sides advance past a new ancestor, the PR update must
        re-point target_ref at the new ancestor commit."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['change'])
        pull_request.target_repo = target
        # TODO: johbo: Target ref should be branch based, since tip can jump
        # from branch to branch
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token},
            status=200)

        # Expect the target reference to be updated correctly
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        expected_target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor-new'])
        assert pull_request.target_ref == expected_target_ref
871 871
    def test_remove_pull_request_branch(self, backend_git, csrf_token):
        """Deleting the source branch renders the 'Missing commits' warning
        instead of crashing the PR page."""
        branch_name = 'development'
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'branch': branch_name},
        ]
        repo = backend_git.create_repo(commits)
        commit_ids = backend_git.commit_ids

        # source and target are the same repo; only the branch differs
        pull_request = PullRequest()
        pull_request.source_repo = repo
        pull_request.target_repo = repo
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=branch_name, commit_id=commit_ids['new-feature'])
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend_git.default_branch_name,
            commit_id=commit_ids['old-feature'])
        pull_request.revisions = [commit_ids['new-feature']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()

        # drop the branch ref so the PR's commits become unreachable
        vcs = repo.scm_instance()
        vcs.remove_ref('refs/heads/{}'.format(branch_name))

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=repo.repo_name,
            pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        assert_response = AssertResponse(response)
        assert_response.element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
        assert_response.element_contains(
            '#changeset_compare_view_content .alert',
            'This pull request cannot be displayed, because one or more'
            ' commits no longer exist in the source repository.')
915 915
    def test_strip_commits_from_pull_request(
            self, backend, pr_util, csrf_token):
        """Stripping the PR's commit shows the missing-commits warning plus
        an 'Update commits' control."""
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'parents': ['initial-commit']},
        ]
        pull_request = pr_util.create_pull_request(
            commits, target_head='initial-commit', source_head='new-feature',
            revisions=['new-feature'])

        # git strip needs the branch name; hg does not
        vcs = pr_util.source_repository.scm_instance()
        if backend.alias == 'git':
            vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
        else:
            vcs.strip(pr_util.commit_ids['new-feature'])

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr_util.target_repository.repo_name,
            pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        assert_response = AssertResponse(response)
        assert_response.element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
        assert_response.element_contains(
            '#changeset_compare_view_content .alert',
            'This pull request cannot be displayed, because one or more'
            ' commits no longer exist in the source repository.')
        assert_response.element_contains(
            '#update_commits',
            'Update commits')
950 950
    def test_strip_commits_and_update(
            self, backend, pr_util, csrf_token):
        """Updating a PR whose commits were stripped succeeds and the PR
        page afterwards renders without a 500."""
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'parents': ['old-feature']},
        ]
        pull_request = pr_util.create_pull_request(
            commits, target_head='old-feature', source_head='new-feature',
            revisions=['new-feature'], mergeable=True)

        # git strip needs the branch name; hg does not
        vcs = pr_util.source_repository.scm_instance()
        if backend.alias == 'git':
            vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
        else:
            vcs.strip(pr_util.commit_ids['new-feature'])

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request.pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token})

        # the update endpoint answers the XHR with a bare JSON 'true'
        assert response.status_int == 200
        assert response.body == 'true'

        # Make sure that after update, it won't raise 500 errors
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr_util.target_repository.repo_name,
            pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        assert_response = AssertResponse(response)
        assert_response.element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
989 989
    def test_branch_is_a_link(self, pr_util):
        """Branch refs render as <a> links to the changelog of that branch."""
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:origin:1234567890abcdef'
        pull_request.target_ref = 'branch:target:abcdef1234567890'
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200
        assert_response = AssertResponse(response)

        # each .tag element must wrap exactly one child (the <a> link)
        origin = assert_response.get_element('.pr-origininfo .tag')
        origin_children = origin.getchildren()
        assert len(origin_children) == 1
        target = assert_response.get_element('.pr-targetinfo .tag')
        target_children = target.getchildren()
        assert len(target_children) == 1

        expected_origin_link = route_path(
            'repo_changelog',
            repo_name=pull_request.source_repo.scm_instance().name,
            params=dict(branch='origin'))
        expected_target_link = route_path(
            'repo_changelog',
            repo_name=pull_request.target_repo.scm_instance().name,
            params=dict(branch='target'))
        assert origin_children[0].attrib['href'] == expected_origin_link
        assert origin_children[0].text == 'branch: origin'
        assert target_children[0].attrib['href'] == expected_target_link
        assert target_children[0].text == 'branch: target'
1023 1023
1024 1024 def test_bookmark_is_not_a_link(self, pr_util):
1025 1025 pull_request = pr_util.create_pull_request()
1026 1026 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1027 1027 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1028 1028 Session().add(pull_request)
1029 1029 Session().commit()
1030 1030
1031 1031 response = self.app.get(route_path(
1032 1032 'pullrequest_show',
1033 1033 repo_name=pull_request.target_repo.scm_instance().name,
1034 1034 pull_request_id=pull_request.pull_request_id))
1035 1035 assert response.status_int == 200
1036 1036 assert_response = AssertResponse(response)
1037 1037
1038 1038 origin = assert_response.get_element('.pr-origininfo .tag')
1039 1039 assert origin.text.strip() == 'bookmark: origin'
1040 1040 assert origin.getchildren() == []
1041 1041
1042 1042 target = assert_response.get_element('.pr-targetinfo .tag')
1043 1043 assert target.text.strip() == 'bookmark: target'
1044 1044 assert target.getchildren() == []
1045 1045
1046 1046 def test_tag_is_not_a_link(self, pr_util):
1047 1047 pull_request = pr_util.create_pull_request()
1048 1048 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1049 1049 pull_request.target_ref = 'tag:target:abcdef1234567890'
1050 1050 Session().add(pull_request)
1051 1051 Session().commit()
1052 1052
1053 1053 response = self.app.get(route_path(
1054 1054 'pullrequest_show',
1055 1055 repo_name=pull_request.target_repo.scm_instance().name,
1056 1056 pull_request_id=pull_request.pull_request_id))
1057 1057 assert response.status_int == 200
1058 1058 assert_response = AssertResponse(response)
1059 1059
1060 1060 origin = assert_response.get_element('.pr-origininfo .tag')
1061 1061 assert origin.text.strip() == 'tag: origin'
1062 1062 assert origin.getchildren() == []
1063 1063
1064 1064 target = assert_response.get_element('.pr-targetinfo .tag')
1065 1065 assert target.text.strip() == 'tag: target'
1066 1066 assert target.getchildren() == []
1067 1067
    @pytest.mark.parametrize('mergeable', [True, False])
    def test_shadow_repository_link(
            self, mergeable, pr_util, http_host_only_stub):
        """
        Check that the pull request summary page displays a link to the shadow
        repository if the pull request is mergeable. If it is not mergeable
        the link should not be displayed.
        """
        pull_request = pr_util.create_pull_request(
            mergeable=mergeable, enable_notifications=False)
        target_repo = pull_request.target_repo.scm_instance()
        pr_id = pull_request.pull_request_id
        shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
            host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=target_repo.name,
            pull_request_id=pr_id))

        assertr = AssertResponse(response)
        if mergeable:
            assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
            # NOTE(review): the trailing space in the selector below looks
            # accidental — confirm it is intentional before cleaning it up.
            assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
        else:
            assertr.no_element_exists('.pr-mergeinfo')
1094 1094
1095 1095
@pytest.mark.usefixtures('app')
@pytest.mark.backends("git", "hg")
class TestPullrequestsControllerDelete(object):
    """Permission checks for the delete/edit controls on the PR page and
    for PR actions against archived repositories."""

    def test_pull_request_delete_button_permissions_admin(
            self, autologin_user, user_admin, pr_util):
        # admins see both the delete button and its confirmation text
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_owner(
            self, autologin_regular_user, user_regular, pr_util):
        # the PR author can delete their own pull request
        pull_request = pr_util.create_pull_request(
            author=user_regular.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_forbidden(
            self, autologin_regular_user, user_regular, user_admin, pr_util):
        # a regular user viewing someone else's PR gets no delete control
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        response.mustcontain(no=['id="delete_pullrequest"'])
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_pull_request_delete_button_permissions_can_update_cannot_delete(
            self, autologin_regular_user, user_regular, user_admin, pr_util,
            user_util):
        # repository.write grants edit but not delete-confirmation
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo, user_regular,
            'repository.write')

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="open_edit_pullrequest"')
        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_delete_comment_returns_404_if_comment_does_not_exist(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # deleting a non-existent comment id must 404, not 500
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=1024404),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=404
        )

    def test_delete_comment(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # successful XHR delete answers with a bare 'true'
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)
        comment = pr_util.create_comment()
        comment_id = comment.comment_id

        response = self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=comment_id),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=200
        )
        assert response.body == 'true'

    @pytest.mark.parametrize('url_type', [
        'pullrequest_new',
        'pullrequest_create',
        'pullrequest_update',
        'pullrequest_merge',
    ])
    def test_pull_request_is_forbidden_on_archived_repo(
            self, autologin_user, backend, xhr_header, user_util, url_type):
        # all PR mutation routes redirect away from archived repositories

        # create a temporary repo
        source = user_util.create_repo(repo_type=backend.alias)
        repo_name = source.repo_name
        repo = Repository.get_by_repo_name(repo_name)
        repo.archived = True
        Session().commit()

        response = self.app.get(
            route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)

        msg = 'Action not supported for archived repository.'
        assert_session_flash(response, msg)
1194 1216
def assert_pull_request_status(pull_request, expected_status):
    """Assert that *pull_request*'s calculated review status matches."""
    actual_status = ChangesetStatusModel().calculated_review_status(
        pull_request=pull_request)
    assert actual_status == expected_status
1199 1221
1200 1222
@pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
@pytest.mark.usefixtures("autologin_user")
def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
    # PR routes are not registered for SVN repositories, so they must 404.
    # NOTE(review): function name has a typo ("forbidde"); kept as-is since
    # renaming would change the collected test id.
    response = app.get(
        route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,150 +1,173 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.utils2 import safe_unicode, safe_str
24 24 from rhodecode.model.db import Repository
25 25 from rhodecode.model.repo import RepoModel
26 26 from rhodecode.tests import (
27 27 HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator)
28 28 from rhodecode.tests.fixture import Fixture
29 29 from rhodecode.tests.utils import repo_on_filesystem
30 30
31 31 fixture = Fixture()
32 32
33 33
34 34 def route_path(name, params=None, **kwargs):
35 35 import urllib
36 36
37 37 base_url = {
38 38 'repo_summary_explicit': '/{repo_name}/summary',
39 39 'repo_summary': '/{repo_name}',
40 40 'edit_repo_advanced': '/{repo_name}/settings/advanced',
41 41 'edit_repo_advanced_delete': '/{repo_name}/settings/advanced/delete',
42 'edit_repo_advanced_archive': '/{repo_name}/settings/advanced/archive',
42 43 'edit_repo_advanced_fork': '/{repo_name}/settings/advanced/fork',
43 44 'edit_repo_advanced_locking': '/{repo_name}/settings/advanced/locking',
44 45 'edit_repo_advanced_journal': '/{repo_name}/settings/advanced/journal',
45 46
46 47 }[name].format(**kwargs)
47 48
48 49 if params:
49 50 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
50 51 return base_url
51 52
52 53
53 54 @pytest.mark.usefixtures('autologin_user', 'app')
54 55 class TestAdminRepoSettingsAdvanced(object):
55 56
56 57 def test_set_repo_fork_has_no_self_id(self, autologin_user, backend):
57 58 repo = backend.repo
58 59 response = self.app.get(
59 60 route_path('edit_repo_advanced', repo_name=backend.repo_name))
60 61 opt = """<option value="%s">vcs_test_git</option>""" % repo.repo_id
61 62 response.mustcontain(no=[opt])
62 63
63 64 def test_set_fork_of_target_repo(
64 65 self, autologin_user, backend, csrf_token):
65 66 target_repo = 'target_%s' % backend.alias
66 67 fixture.create_repo(target_repo, repo_type=backend.alias)
67 68 repo2 = Repository.get_by_repo_name(target_repo)
68 69 response = self.app.post(
69 70 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
70 71 params={'id_fork_of': repo2.repo_id,
71 72 'csrf_token': csrf_token})
72 73 repo = Repository.get_by_repo_name(backend.repo_name)
73 74 repo2 = Repository.get_by_repo_name(target_repo)
74 75 assert_session_flash(
75 76 response,
76 77 'Marked repo %s as fork of %s' % (repo.repo_name, repo2.repo_name))
77 78
78 79 assert repo.fork == repo2
79 80 response = response.follow()
80 81 # check if given repo is selected
81 82
82 83 opt = 'This repository is a fork of <a href="%s">%s</a>' % (
83 84 route_path('repo_summary', repo_name=repo2.repo_name),
84 85 repo2.repo_name)
85 86
86 87 response.mustcontain(opt)
87 88
88 89 fixture.destroy_repo(target_repo, forks='detach')
89 90
90 91 @pytest.mark.backends("hg", "git")
91 92 def test_set_fork_of_other_type_repo(
92 93 self, autologin_user, backend, csrf_token):
93 94 TARGET_REPO_MAP = {
94 95 'git': {
95 96 'type': 'hg',
96 97 'repo_name': HG_REPO},
97 98 'hg': {
98 99 'type': 'git',
99 100 'repo_name': GIT_REPO},
100 101 }
101 102 target_repo = TARGET_REPO_MAP[backend.alias]
102 103
103 104 repo2 = Repository.get_by_repo_name(target_repo['repo_name'])
104 105 response = self.app.post(
105 106 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
106 107 params={'id_fork_of': repo2.repo_id,
107 108 'csrf_token': csrf_token})
108 109 assert_session_flash(
109 110 response,
110 111 'Cannot set repository as fork of repository with other type')
111 112
112 113 def test_set_fork_of_none(self, autologin_user, backend, csrf_token):
113 114 # mark it as None
114 115 response = self.app.post(
115 116 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
116 117 params={'id_fork_of': None,
117 118 'csrf_token': csrf_token})
118 119 assert_session_flash(
119 120 response,
120 121 'Marked repo %s as fork of %s'
121 122 % (backend.repo_name, "Nothing"))
122 123 assert backend.repo.fork is None
123 124
124 125 def test_set_fork_of_same_repo(self, autologin_user, backend, csrf_token):
125 126 repo = Repository.get_by_repo_name(backend.repo_name)
126 127 response = self.app.post(
127 128 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
128 129 params={'id_fork_of': repo.repo_id, 'csrf_token': csrf_token})
129 130 assert_session_flash(
130 131 response, 'An error occurred during this operation')
131 132
132 133 @pytest.mark.parametrize(
133 134 "suffix",
134 135 ['', u'ąęł' , '123'],
135 136 ids=no_newline_id_generator)
136 def test_advanced_delete(self, autologin_user, backend, suffix, csrf_token):
137 def test_advanced_repo_delete(self, autologin_user, backend, suffix, csrf_token):
137 138 repo = backend.create_repo(name_suffix=suffix)
138 139 repo_name = repo.repo_name
139 140 repo_name_str = safe_str(repo.repo_name)
140 141
141 142 response = self.app.post(
142 143 route_path('edit_repo_advanced_delete', repo_name=repo_name_str),
143 144 params={'csrf_token': csrf_token})
144 145 assert_session_flash(response,
145 146 u'Deleted repository `{}`'.format(repo_name))
146 147 response.follow()
147 148
148 149 # check if repo was deleted from db
149 150 assert RepoModel().get_by_repo_name(repo_name) is None
150 151 assert not repo_on_filesystem(repo_name_str)
152
153 @pytest.mark.parametrize(
154 "suffix",
155 ['', u'ąęł' , '123'],
156 ids=no_newline_id_generator)
157 def test_advanced_repo_archive(self, autologin_user, backend, suffix, csrf_token):
158 repo = backend.create_repo(name_suffix=suffix)
159 repo_name = repo.repo_name
160 repo_name_str = safe_str(repo.repo_name)
161
162 response = self.app.post(
163 route_path('edit_repo_advanced_archive', repo_name=repo_name_str),
164 params={'csrf_token': csrf_token})
165
166 assert_session_flash(response,
167 u'Archived repository `{}`'.format(repo_name))
168
169 response = self.app.get(route_path('repo_summary', repo_name=repo_name_str))
170 response.mustcontain('This repository has been archived. It is now read-only.')
171
172 # check if repo was deleted from db
173 assert RepoModel().get_by_repo_name(repo_name).archived is True
@@ -1,263 +1,314 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24 from pyramid.httpexceptions import HTTPFound
25 25
26 from rhodecode import events
26 27 from rhodecode.apps._base import RepoAppView
27 28 from rhodecode.lib import helpers as h
28 29 from rhodecode.lib import audit_logger
29 30 from rhodecode.lib.auth import (
30 31 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
31 32 HasRepoPermissionAny)
32 33 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
33 34 from rhodecode.lib.utils2 import safe_int
34 35 from rhodecode.lib.vcs import RepositoryError
35 36 from rhodecode.model.db import Session, UserFollowing, User, Repository
36 37 from rhodecode.model.repo import RepoModel
37 38 from rhodecode.model.scm import ScmModel
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
42 43 class RepoSettingsView(RepoAppView):
43 44
44 45 def load_default_context(self):
45 46 c = self._get_local_tmpl_context()
46 47 return c
47 48
49 def _get_users_with_permissions(self):
50 user_permissions = {}
51 for perm in self.db_repo.permissions():
52 user_permissions[perm.user_id] = perm
53
54 return user_permissions
55
48 56 @LoginRequired()
49 57 @HasRepoPermissionAnyDecorator('repository.admin')
50 58 @view_config(
51 59 route_name='edit_repo_advanced', request_method='GET',
52 60 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
53 61 def edit_advanced(self):
54 62 c = self.load_default_context()
55 63 c.active = 'advanced'
56 64
57 65 c.default_user_id = User.get_default_user().user_id
58 66 c.in_public_journal = UserFollowing.query() \
59 67 .filter(UserFollowing.user_id == c.default_user_id) \
60 68 .filter(UserFollowing.follows_repository == self.db_repo).scalar()
61 69
62 70 c.has_origin_repo_read_perm = False
63 71 if self.db_repo.fork:
64 72 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
65 73 'repository.write', 'repository.read', 'repository.admin')(
66 74 self.db_repo.fork.repo_name, 'repo set as fork page')
67 75
68 76 return self._get_template_context(c)
69 77
70 78 @LoginRequired()
71 79 @HasRepoPermissionAnyDecorator('repository.admin')
72 80 @CSRFRequired()
73 81 @view_config(
82 route_name='edit_repo_advanced_archive', request_method='POST',
83 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
84 def edit_advanced_archive(self):
85 """
86 Archives the repository. It will become read-only, and not visible in search
87 or other queries. But still visible for super-admins.
88 """
89
90 _ = self.request.translate
91
92 try:
93 old_data = self.db_repo.get_api_data()
94 RepoModel().archive(self.db_repo)
95
96 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
97 audit_logger.store_web(
98 'repo.archive', action_data={'old_data': old_data},
99 user=self._rhodecode_user, repo=repo)
100
101 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
102 h.flash(
103 _('Archived repository `%s`') % self.db_repo_name,
104 category='success')
105 Session().commit()
106 except Exception:
107 log.exception("Exception during archiving of repository")
108 h.flash(_('An error occurred during archiving of `%s`')
109 % self.db_repo_name, category='error')
110 # redirect to advanced for more deletion options
111 raise HTTPFound(
112 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
113 _anchor='advanced-archive'))
114
115 # flush permissions for all users defined in permissions
116 affected_user_ids = self._get_users_with_permissions().keys()
117 events.trigger(events.UserPermissionsChange(affected_user_ids))
118
119 raise HTTPFound(h.route_path('home'))
120
121 @LoginRequired()
122 @HasRepoPermissionAnyDecorator('repository.admin')
123 @CSRFRequired()
124 @view_config(
74 125 route_name='edit_repo_advanced_delete', request_method='POST',
75 126 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
76 127 def edit_advanced_delete(self):
77 128 """
78 129 Deletes the repository, or shows warnings if deletion is not possible
79 130 because of attached forks or other errors.
80 131 """
81 132 _ = self.request.translate
82 133 handle_forks = self.request.POST.get('forks', None)
83 134 if handle_forks == 'detach_forks':
84 135 handle_forks = 'detach'
85 136 elif handle_forks == 'delete_forks':
86 137 handle_forks = 'delete'
87 138
88 139 try:
89 140 old_data = self.db_repo.get_api_data()
90 141 RepoModel().delete(self.db_repo, forks=handle_forks)
91 142
92 143 _forks = self.db_repo.forks.count()
93 144 if _forks and handle_forks:
94 145 if handle_forks == 'detach_forks':
95 146 h.flash(_('Detached %s forks') % _forks, category='success')
96 147 elif handle_forks == 'delete_forks':
97 148 h.flash(_('Deleted %s forks') % _forks, category='success')
98 149
99 150 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
100 151 audit_logger.store_web(
101 152 'repo.delete', action_data={'old_data': old_data},
102 153 user=self._rhodecode_user, repo=repo)
103 154
104 155 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
105 156 h.flash(
106 157 _('Deleted repository `%s`') % self.db_repo_name,
107 158 category='success')
108 159 Session().commit()
109 160 except AttachedForksError:
110 161 repo_advanced_url = h.route_path(
111 162 'edit_repo_advanced', repo_name=self.db_repo_name,
112 163 _anchor='advanced-delete')
113 164 delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
114 165 h.flash(_('Cannot delete `{repo}` it still contains attached forks. '
115 166 'Try using {delete_or_detach} option.')
116 167 .format(repo=self.db_repo_name, delete_or_detach=delete_anchor),
117 168 category='warning')
118 169
119 170 # redirect to advanced for forks handle action ?
120 171 raise HTTPFound(repo_advanced_url)
121 172
122 173 except AttachedPullRequestsError:
123 174 repo_advanced_url = h.route_path(
124 175 'edit_repo_advanced', repo_name=self.db_repo_name,
125 176 _anchor='advanced-delete')
126 177 attached_prs = len(self.db_repo.pull_requests_source +
127 178 self.db_repo.pull_requests_target)
128 179 h.flash(
129 180 _('Cannot delete `{repo}` it still contains {num} attached pull requests. '
130 181 'Consider archiving the repository instead.').format(
131 182 repo=self.db_repo_name, num=attached_prs), category='warning')
132 183
133 184 # redirect to advanced for forks handle action ?
134 185 raise HTTPFound(repo_advanced_url)
135 186
136 187 except Exception:
137 188 log.exception("Exception during deletion of repository")
138 189 h.flash(_('An error occurred during deletion of `%s`')
139 190 % self.db_repo_name, category='error')
140 191 # redirect to advanced for more deletion options
141 192 raise HTTPFound(
142 193 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
143 194 _anchor='advanced-delete'))
144 195
145 196 raise HTTPFound(h.route_path('home'))
146 197
147 198 @LoginRequired()
148 199 @HasRepoPermissionAnyDecorator('repository.admin')
149 200 @CSRFRequired()
150 201 @view_config(
151 202 route_name='edit_repo_advanced_journal', request_method='POST',
152 203 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
153 204 def edit_advanced_journal(self):
154 205 """
155 206 Set's this repository to be visible in public journal,
156 207 in other words making default user to follow this repo
157 208 """
158 209 _ = self.request.translate
159 210
160 211 try:
161 212 user_id = User.get_default_user().user_id
162 213 ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id)
163 214 h.flash(_('Updated repository visibility in public journal'),
164 215 category='success')
165 216 Session().commit()
166 217 except Exception:
167 218 h.flash(_('An error occurred during setting this '
168 219 'repository in public journal'),
169 220 category='error')
170 221
171 222 raise HTTPFound(
172 223 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
173 224
174 225 @LoginRequired()
175 226 @HasRepoPermissionAnyDecorator('repository.admin')
176 227 @CSRFRequired()
177 228 @view_config(
178 229 route_name='edit_repo_advanced_fork', request_method='POST',
179 230 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
180 231 def edit_advanced_fork(self):
181 232 """
182 233 Mark given repository as a fork of another
183 234 """
184 235 _ = self.request.translate
185 236
186 237 new_fork_id = safe_int(self.request.POST.get('id_fork_of'))
187 238
188 239 # valid repo, re-check permissions
189 240 if new_fork_id:
190 241 repo = Repository.get(new_fork_id)
191 242 # ensure we have at least read access to the repo we mark
192 243 perm_check = HasRepoPermissionAny(
193 244 'repository.read', 'repository.write', 'repository.admin')
194 245
195 246 if repo and perm_check(repo_name=repo.repo_name):
196 247 new_fork_id = repo.repo_id
197 248 else:
198 249 new_fork_id = None
199 250
200 251 try:
201 252 repo = ScmModel().mark_as_fork(
202 253 self.db_repo_name, new_fork_id, self._rhodecode_user.user_id)
203 254 fork = repo.fork.repo_name if repo.fork else _('Nothing')
204 255 Session().commit()
205 256 h.flash(
206 257 _('Marked repo %s as fork of %s') % (self.db_repo_name, fork),
207 258 category='success')
208 259 except RepositoryError as e:
209 260 log.exception("Repository Error occurred")
210 261 h.flash(str(e), category='error')
211 262 except Exception:
212 263 log.exception("Exception while editing fork")
213 264 h.flash(_('An error occurred during this operation'),
214 265 category='error')
215 266
216 267 raise HTTPFound(
217 268 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
218 269
219 270 @LoginRequired()
220 271 @HasRepoPermissionAnyDecorator('repository.admin')
221 272 @CSRFRequired()
222 273 @view_config(
223 274 route_name='edit_repo_advanced_locking', request_method='POST',
224 275 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
225 276 def edit_advanced_locking(self):
226 277 """
227 278 Toggle locking of repository
228 279 """
229 280 _ = self.request.translate
230 281 set_lock = self.request.POST.get('set_lock')
231 282 set_unlock = self.request.POST.get('set_unlock')
232 283
233 284 try:
234 285 if set_lock:
235 286 Repository.lock(self.db_repo, self._rhodecode_user.user_id,
236 287 lock_reason=Repository.LOCK_WEB)
237 288 h.flash(_('Locked repository'), category='success')
238 289 elif set_unlock:
239 290 Repository.unlock(self.db_repo)
240 291 h.flash(_('Unlocked repository'), category='success')
241 292 except Exception as e:
242 293 log.exception("Exception during unlocking")
243 294 h.flash(_('An error occurred during unlocking'), category='error')
244 295
245 296 raise HTTPFound(
246 297 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
247 298
248 299 @LoginRequired()
249 300 @HasRepoPermissionAnyDecorator('repository.admin')
250 301 @view_config(
251 302 route_name='edit_repo_advanced_hooks', request_method='GET',
252 303 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
253 304 def edit_advanced_install_hooks(self):
254 305 """
255 306 Install Hooks for repository
256 307 """
257 308 _ = self.request.translate
258 309 self.load_default_context()
259 310 self.rhodecode_vcs_repo.install_hooks(force=True)
260 311 h.flash(_('installed updated hooks into this repository'),
261 312 category='success')
262 313 raise HTTPFound(
263 314 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
@@ -1,287 +1,288 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23
24 24 from rhodecode.lib.jsonalchemy import JsonRaw
25 25 from rhodecode.model import meta
26 26 from rhodecode.model.db import User, UserLog, Repository
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31 # action as key, and expected action_data as value
32 32 ACTIONS_V1 = {
33 33 'user.login.success': {'user_agent': ''},
34 34 'user.login.failure': {'user_agent': ''},
35 35 'user.logout': {'user_agent': ''},
36 36 'user.register': {},
37 37 'user.password.reset_request': {},
38 38 'user.push': {'user_agent': '', 'commit_ids': []},
39 39 'user.pull': {'user_agent': ''},
40 40
41 41 'user.create': {'data': {}},
42 42 'user.delete': {'old_data': {}},
43 43 'user.edit': {'old_data': {}},
44 44 'user.edit.permissions': {},
45 45 'user.edit.ip.add': {'ip': {}, 'user': {}},
46 46 'user.edit.ip.delete': {'ip': {}, 'user': {}},
47 47 'user.edit.token.add': {'token': {}, 'user': {}},
48 48 'user.edit.token.delete': {'token': {}, 'user': {}},
49 49 'user.edit.email.add': {'email': ''},
50 50 'user.edit.email.delete': {'email': ''},
51 51 'user.edit.ssh_key.add': {'token': {}, 'user': {}},
52 52 'user.edit.ssh_key.delete': {'token': {}, 'user': {}},
53 53 'user.edit.password_reset.enabled': {},
54 54 'user.edit.password_reset.disabled': {},
55 55
56 56 'user_group.create': {'data': {}},
57 57 'user_group.delete': {'old_data': {}},
58 58 'user_group.edit': {'old_data': {}},
59 59 'user_group.edit.permissions': {},
60 60 'user_group.edit.member.add': {'user': {}},
61 61 'user_group.edit.member.delete': {'user': {}},
62 62
63 63 'repo.create': {'data': {}},
64 64 'repo.fork': {'data': {}},
65 65 'repo.edit': {'old_data': {}},
66 66 'repo.edit.permissions': {},
67 67 'repo.edit.permissions.branch': {},
68 'repo.archive': {'old_data': {}},
68 69 'repo.delete': {'old_data': {}},
69 70
70 71 'repo.archive.download': {'user_agent': '', 'archive_name': '',
71 72 'archive_spec': '', 'archive_cached': ''},
72 73
73 74 'repo.permissions.branch_rule.create': {},
74 75 'repo.permissions.branch_rule.edit': {},
75 76 'repo.permissions.branch_rule.delete': {},
76 77
77 78 'repo.pull_request.create': '',
78 79 'repo.pull_request.edit': '',
79 80 'repo.pull_request.delete': '',
80 81 'repo.pull_request.close': '',
81 82 'repo.pull_request.merge': '',
82 83 'repo.pull_request.vote': '',
83 84 'repo.pull_request.comment.create': '',
84 85 'repo.pull_request.comment.delete': '',
85 86
86 87 'repo.pull_request.reviewer.add': '',
87 88 'repo.pull_request.reviewer.delete': '',
88 89
89 90 'repo.commit.strip': {'commit_id': ''},
90 91 'repo.commit.comment.create': {'data': {}},
91 92 'repo.commit.comment.delete': {'data': {}},
92 93 'repo.commit.vote': '',
93 94
94 95 'repo_group.create': {'data': {}},
95 96 'repo_group.edit': {'old_data': {}},
96 97 'repo_group.edit.permissions': {},
97 98 'repo_group.delete': {'old_data': {}},
98 99 }
99 100
100 101 ACTIONS = ACTIONS_V1
101 102
102 103 SOURCE_WEB = 'source_web'
103 104 SOURCE_API = 'source_api'
104 105
105 106
106 107 class UserWrap(object):
107 108 """
108 109 Fake object used to imitate AuthUser
109 110 """
110 111
111 112 def __init__(self, user_id=None, username=None, ip_addr=None):
112 113 self.user_id = user_id
113 114 self.username = username
114 115 self.ip_addr = ip_addr
115 116
116 117
117 118 class RepoWrap(object):
118 119 """
119 120 Fake object used to imitate RepoObject that audit logger requires
120 121 """
121 122
122 123 def __init__(self, repo_id=None, repo_name=None):
123 124 self.repo_id = repo_id
124 125 self.repo_name = repo_name
125 126
126 127
127 128 def _store_log(action_name, action_data, user_id, username, user_data,
128 129 ip_address, repository_id, repository_name):
129 130 user_log = UserLog()
130 131 user_log.version = UserLog.VERSION_2
131 132
132 133 user_log.action = action_name
133 134 user_log.action_data = action_data or JsonRaw(u'{}')
134 135
135 136 user_log.user_ip = ip_address
136 137
137 138 user_log.user_id = user_id
138 139 user_log.username = username
139 140 user_log.user_data = user_data or JsonRaw(u'{}')
140 141
141 142 user_log.repository_id = repository_id
142 143 user_log.repository_name = repository_name
143 144
144 145 user_log.action_date = datetime.datetime.now()
145 146
146 147 return user_log
147 148
148 149
149 150 def store_web(*args, **kwargs):
150 151 if 'action_data' not in kwargs:
151 152 kwargs['action_data'] = {}
152 153 kwargs['action_data'].update({
153 154 'source': SOURCE_WEB
154 155 })
155 156 return store(*args, **kwargs)
156 157
157 158
158 159 def store_api(*args, **kwargs):
159 160 if 'action_data' not in kwargs:
160 161 kwargs['action_data'] = {}
161 162 kwargs['action_data'].update({
162 163 'source': SOURCE_API
163 164 })
164 165 return store(*args, **kwargs)
165 166
166 167
167 168 def store(action, user, action_data=None, user_data=None, ip_addr=None,
168 169 repo=None, sa_session=None, commit=False):
169 170 """
170 171 Audit logger for various actions made by users, typically this
171 172 results in a call such::
172 173
173 174 from rhodecode.lib import audit_logger
174 175
175 176 audit_logger.store(
176 177 'repo.edit', user=self._rhodecode_user)
177 178 audit_logger.store(
178 179 'repo.delete', action_data={'data': repo_data},
179 180 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'))
180 181
181 182 # repo action
182 183 audit_logger.store(
183 184 'repo.delete',
184 185 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'),
185 186 repo=audit_logger.RepoWrap(repo_name='some-repo'))
186 187
187 188 # repo action, when we know and have the repository object already
188 189 audit_logger.store(
189 190 'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, },
190 191 user=self._rhodecode_user,
191 192 repo=repo_object)
192 193
193 194 # alternative wrapper to the above
194 195 audit_logger.store_web(
195 196 'repo.delete', action_data={},
196 197 user=self._rhodecode_user,
197 198 repo=repo_object)
198 199
199 200 # without an user ?
200 201 audit_logger.store(
201 202 'user.login.failure',
202 203 user=audit_logger.UserWrap(
203 204 username=self.request.params.get('username'),
204 205 ip_addr=self.request.remote_addr))
205 206
206 207 """
207 208 from rhodecode.lib.utils2 import safe_unicode
208 209 from rhodecode.lib.auth import AuthUser
209 210
210 211 action_spec = ACTIONS.get(action, None)
211 212 if action_spec is None:
212 213 raise ValueError('Action `{}` is not supported'.format(action))
213 214
214 215 if not sa_session:
215 216 sa_session = meta.Session()
216 217
217 218 try:
218 219 username = getattr(user, 'username', None)
219 220 if not username:
220 221 pass
221 222
222 223 user_id = getattr(user, 'user_id', None)
223 224 if not user_id:
224 225 # maybe we have username ? Try to figure user_id from username
225 226 if username:
226 227 user_id = getattr(
227 228 User.get_by_username(username), 'user_id', None)
228 229
229 230 ip_addr = ip_addr or getattr(user, 'ip_addr', None)
230 231 if not ip_addr:
231 232 pass
232 233
233 234 if not user_data:
234 235 # try to get this from the auth user
235 236 if isinstance(user, AuthUser):
236 237 user_data = {
237 238 'username': user.username,
238 239 'email': user.email,
239 240 }
240 241
241 242 repository_name = getattr(repo, 'repo_name', None)
242 243 repository_id = getattr(repo, 'repo_id', None)
243 244 if not repository_id:
244 245 # maybe we have repo_name ? Try to figure repo_id from repo_name
245 246 if repository_name:
246 247 repository_id = getattr(
247 248 Repository.get_by_repo_name(repository_name), 'repo_id', None)
248 249
249 250 action_name = safe_unicode(action)
250 251 ip_address = safe_unicode(ip_addr)
251 252
252 253 with sa_session.no_autoflush:
253 254 update_user_last_activity(sa_session, user_id)
254 255
255 256 user_log = _store_log(
256 257 action_name=action_name,
257 258 action_data=action_data or {},
258 259 user_id=user_id,
259 260 username=username,
260 261 user_data=user_data or {},
261 262 ip_address=ip_address,
262 263 repository_id=repository_id,
263 264 repository_name=repository_name
264 265 )
265 266
266 267 sa_session.add(user_log)
267 268
268 269 if commit:
269 270 sa_session.commit()
270 271
271 272 entry_id = user_log.entry_id or ''
272 273 log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s',
273 274 entry_id, action_name, user_id, username, ip_address)
274 275
275 276 except Exception:
276 277 log.exception('AUDIT: failed to store audit log')
277 278
278 279
279 280 def update_user_last_activity(sa_session, user_id):
280 281 _last_activity = datetime.datetime.now()
281 282 try:
282 283 sa_session.query(User).filter(User.user_id == user_id).update(
283 284 {"last_activity": _last_activity})
284 285 log.debug(
285 286 'updated user `%s` last activity to:%s', user_id, _last_activity)
286 287 except Exception:
287 288 log.exception("Failed last activity update")
@@ -1,2338 +1,2355 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import time
27 27 import inspect
28 28 import collections
29 29 import fnmatch
30 30 import hashlib
31 31 import itertools
32 32 import logging
33 33 import random
34 34 import traceback
35 35 from functools import wraps
36 36
37 37 import ipaddress
38 38
39 39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 40 from sqlalchemy.orm.exc import ObjectDeletedError
41 41 from sqlalchemy.orm import joinedload
42 42 from zope.cachedescriptors.property import Lazy as LazyProperty
43 43
44 44 import rhodecode
45 45 from rhodecode.model import meta
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.user import UserModel
48 48 from rhodecode.model.db import (
49 49 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 50 UserIpMap, UserApiKeys, RepoGroup, UserGroup)
51 51 from rhodecode.lib import rc_cache
52 52 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5, safe_int, sha1
53 53 from rhodecode.lib.utils import (
54 54 get_repo_slug, get_repo_group_slug, get_user_group_slug)
55 55 from rhodecode.lib.caching_query import FromCache
56 56
57 57
58 58 if rhodecode.is_unix:
59 59 import bcrypt
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63 csrf_token_key = "csrf_token"
64 64
65 65
66 66 class PasswordGenerator(object):
67 67 """
68 68 This is a simple class for generating password from different sets of
69 69 characters
70 70 usage::
71 71
72 72 passwd_gen = PasswordGenerator()
73 73 #print 8-letter password containing only big and small letters
74 74 of alphabet
75 75 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
76 76 """
77 77 ALPHABETS_NUM = r'''1234567890'''
78 78 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
79 79 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
80 80 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
81 81 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
82 82 + ALPHABETS_NUM + ALPHABETS_SPECIAL
83 83 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
84 84 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
85 85 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
86 86 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
87 87
88 88 def __init__(self, passwd=''):
89 89 self.passwd = passwd
90 90
91 91 def gen_password(self, length, type_=None):
92 92 if type_ is None:
93 93 type_ = self.ALPHABETS_FULL
94 94 self.passwd = ''.join([random.choice(type_) for _ in range(length)])
95 95 return self.passwd
96 96
97 97
98 98 class _RhodeCodeCryptoBase(object):
99 99 ENC_PREF = None
100 100
101 101 def hash_create(self, str_):
102 102 """
103 103 hash the string using
104 104
105 105 :param str_: password to hash
106 106 """
107 107 raise NotImplementedError
108 108
109 109 def hash_check_with_upgrade(self, password, hashed):
110 110 """
111 111 Returns tuple in which first element is boolean that states that
112 112 given password matches it's hashed version, and the second is new hash
113 113 of the password, in case this password should be migrated to new
114 114 cipher.
115 115 """
116 116 checked_hash = self.hash_check(password, hashed)
117 117 return checked_hash, None
118 118
119 119 def hash_check(self, password, hashed):
120 120 """
121 121 Checks matching password with it's hashed value.
122 122
123 123 :param password: password
124 124 :param hashed: password in hashed form
125 125 """
126 126 raise NotImplementedError
127 127
128 128 def _assert_bytes(self, value):
129 129 """
130 130 Passing in an `unicode` object can lead to hard to detect issues
131 131 if passwords contain non-ascii characters. Doing a type check
132 132 during runtime, so that such mistakes are detected early on.
133 133 """
134 134 if not isinstance(value, str):
135 135 raise TypeError(
136 136 "Bytestring required as input, got %r." % (value, ))
137 137
138 138
139 139 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
140 140 ENC_PREF = ('$2a$10', '$2b$10')
141 141
142 142 def hash_create(self, str_):
143 143 self._assert_bytes(str_)
144 144 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
145 145
146 146 def hash_check_with_upgrade(self, password, hashed):
147 147 """
148 148 Returns tuple in which first element is boolean that states that
149 149 given password matches it's hashed version, and the second is new hash
150 150 of the password, in case this password should be migrated to new
151 151 cipher.
152 152
153 153 This implements special upgrade logic which works like that:
154 154 - check if the given password == bcrypted hash, if yes then we
155 155 properly used password and it was already in bcrypt. Proceed
156 156 without any changes
157 157 - if bcrypt hash check is not working try with sha256. If hash compare
158 158 is ok, it means we using correct but old hashed password. indicate
159 159 hash change and proceed
160 160 """
161 161
162 162 new_hash = None
163 163
164 164 # regular pw check
165 165 password_match_bcrypt = self.hash_check(password, hashed)
166 166
167 167 # now we want to know if the password was maybe from sha256
168 168 # basically calling _RhodeCodeCryptoSha256().hash_check()
169 169 if not password_match_bcrypt:
170 170 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
171 171 new_hash = self.hash_create(password) # make new bcrypt hash
172 172 password_match_bcrypt = True
173 173
174 174 return password_match_bcrypt, new_hash
175 175
176 176 def hash_check(self, password, hashed):
177 177 """
178 178 Checks matching password with it's hashed value.
179 179
180 180 :param password: password
181 181 :param hashed: password in hashed form
182 182 """
183 183 self._assert_bytes(password)
184 184 try:
185 185 return bcrypt.hashpw(password, hashed) == hashed
186 186 except ValueError as e:
187 187 # we're having a invalid salt here probably, we should not crash
188 188 # just return with False as it would be a wrong password.
189 189 log.debug('Failed to check password hash using bcrypt %s',
190 190 safe_str(e))
191 191
192 192 return False
193 193
194 194
195 195 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
196 196 ENC_PREF = '_'
197 197
198 198 def hash_create(self, str_):
199 199 self._assert_bytes(str_)
200 200 return hashlib.sha256(str_).hexdigest()
201 201
202 202 def hash_check(self, password, hashed):
203 203 """
204 204 Checks matching password with it's hashed value.
205 205
206 206 :param password: password
207 207 :param hashed: password in hashed form
208 208 """
209 209 self._assert_bytes(password)
210 210 return hashlib.sha256(password).hexdigest() == hashed
211 211
212 212
213 213 class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
214 214 ENC_PREF = '_'
215 215
216 216 def hash_create(self, str_):
217 217 self._assert_bytes(str_)
218 218 return sha1(str_)
219 219
220 220 def hash_check(self, password, hashed):
221 221 """
222 222 Checks matching password with it's hashed value.
223 223
224 224 :param password: password
225 225 :param hashed: password in hashed form
226 226 """
227 227 self._assert_bytes(password)
228 228 return sha1(password) == hashed
229 229
230 230
231 231 def crypto_backend():
232 232 """
233 233 Return the matching crypto backend.
234 234
235 235 Selection is based on if we run tests or not, we pick sha1-test backend to run
236 236 tests faster since BCRYPT is expensive to calculate
237 237 """
238 238 if rhodecode.is_test:
239 239 RhodeCodeCrypto = _RhodeCodeCryptoTest()
240 240 else:
241 241 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 242
243 243 return RhodeCodeCrypto
244 244
245 245
246 246 def get_crypt_password(password):
247 247 """
248 248 Create the hash of `password` with the active crypto backend.
249 249
250 250 :param password: The cleartext password.
251 251 :type password: unicode
252 252 """
253 253 password = safe_str(password)
254 254 return crypto_backend().hash_create(password)
255 255
256 256
257 257 def check_password(password, hashed):
258 258 """
259 259 Check if the value in `password` matches the hash in `hashed`.
260 260
261 261 :param password: The cleartext password.
262 262 :type password: unicode
263 263
264 264 :param hashed: The expected hashed version of the password.
265 265 :type hashed: The hash has to be passed in in text representation.
266 266 """
267 267 password = safe_str(password)
268 268 return crypto_backend().hash_check(password, hashed)
269 269
270 270
271 271 def generate_auth_token(data, salt=None):
272 272 """
273 273 Generates API KEY from given string
274 274 """
275 275
276 276 if salt is None:
277 277 salt = os.urandom(16)
278 278 return hashlib.sha1(safe_str(data) + salt).hexdigest()
279 279
280 280
281 281 def get_came_from(request):
282 282 """
283 283 get query_string+path from request sanitized after removing auth_token
284 284 """
285 285 _req = request
286 286
287 287 path = _req.path
288 288 if 'auth_token' in _req.GET:
289 289 # sanitize the request and remove auth_token for redirection
290 290 _req.GET.pop('auth_token')
291 291 qs = _req.query_string
292 292 if qs:
293 293 path += '?' + qs
294 294
295 295 return path
296 296
297 297
298 298 class CookieStoreWrapper(object):
299 299
300 300 def __init__(self, cookie_store):
301 301 self.cookie_store = cookie_store
302 302
303 303 def __repr__(self):
304 304 return 'CookieStore<%s>' % (self.cookie_store)
305 305
306 306 def get(self, key, other=None):
307 307 if isinstance(self.cookie_store, dict):
308 308 return self.cookie_store.get(key, other)
309 309 elif isinstance(self.cookie_store, AuthUser):
310 310 return self.cookie_store.__dict__.get(key, other)
311 311
312 312
313 313 def _cached_perms_data(user_id, scope, user_is_admin,
314 314 user_inherit_default_permissions, explicit, algo,
315 315 calculate_super_admin):
316 316
317 317 permissions = PermissionCalculator(
318 318 user_id, scope, user_is_admin, user_inherit_default_permissions,
319 319 explicit, algo, calculate_super_admin)
320 320 return permissions.calculate()
321 321
322 322
323 323 class PermOrigin(object):
324 324 SUPER_ADMIN = 'superadmin'
325 ARCHIVED = 'archived'
325 326
326 327 REPO_USER = 'user:%s'
327 328 REPO_USERGROUP = 'usergroup:%s'
328 329 REPO_OWNER = 'repo.owner'
329 330 REPO_DEFAULT = 'repo.default'
330 331 REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
331 332 REPO_PRIVATE = 'repo.private'
332 333
333 334 REPOGROUP_USER = 'user:%s'
334 335 REPOGROUP_USERGROUP = 'usergroup:%s'
335 336 REPOGROUP_OWNER = 'group.owner'
336 337 REPOGROUP_DEFAULT = 'group.default'
337 338 REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'
338 339
339 340 USERGROUP_USER = 'user:%s'
340 341 USERGROUP_USERGROUP = 'usergroup:%s'
341 342 USERGROUP_OWNER = 'usergroup.owner'
342 343 USERGROUP_DEFAULT = 'usergroup.default'
343 344 USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 345
345 346
346 347 class PermOriginDict(dict):
347 348 """
348 349 A special dict used for tracking permissions along with their origins.
349 350
350 351 `__setitem__` has been overridden to expect a tuple(perm, origin)
351 352 `__getitem__` will return only the perm
352 353 `.perm_origin_stack` will return the stack of (perm, origin) set per key
353 354
354 355 >>> perms = PermOriginDict()
355 356 >>> perms['resource'] = 'read', 'default'
356 357 >>> perms['resource']
357 358 'read'
358 359 >>> perms['resource'] = 'write', 'admin'
359 360 >>> perms['resource']
360 361 'write'
361 362 >>> perms.perm_origin_stack
362 363 {'resource': [('read', 'default'), ('write', 'admin')]}
363 364 """
364 365
365 366 def __init__(self, *args, **kw):
366 367 dict.__init__(self, *args, **kw)
367 368 self.perm_origin_stack = collections.OrderedDict()
368 369
369 370 def __setitem__(self, key, (perm, origin)):
370 371 self.perm_origin_stack.setdefault(key, []).append(
371 372 (perm, origin))
372 373 dict.__setitem__(self, key, perm)
373 374
374 375
375 376 class BranchPermOriginDict(PermOriginDict):
376 377 """
377 378 Dedicated branch permissions dict, with tracking of patterns and origins.
378 379
379 380 >>> perms = BranchPermOriginDict()
380 381 >>> perms['resource'] = '*pattern', 'read', 'default'
381 382 >>> perms['resource']
382 383 {'*pattern': 'read'}
383 384 >>> perms['resource'] = '*pattern', 'write', 'admin'
384 385 >>> perms['resource']
385 386 {'*pattern': 'write'}
386 387 >>> perms.perm_origin_stack
387 388 {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
388 389 """
389 390 def __setitem__(self, key, (pattern, perm, origin)):
390 391
391 392 self.perm_origin_stack.setdefault(key, {}) \
392 393 .setdefault(pattern, []).append((perm, origin))
393 394
394 395 if key in self:
395 396 self[key].__setitem__(pattern, perm)
396 397 else:
397 398 patterns = collections.OrderedDict()
398 399 patterns[pattern] = perm
399 400 dict.__setitem__(self, key, patterns)
400 401
401 402
402 403 class PermissionCalculator(object):
403 404
404 405 def __init__(
405 406 self, user_id, scope, user_is_admin,
406 407 user_inherit_default_permissions, explicit, algo,
407 408 calculate_super_admin_as_user=False):
408 409
409 410 self.user_id = user_id
410 411 self.user_is_admin = user_is_admin
411 412 self.inherit_default_permissions = user_inherit_default_permissions
412 413 self.explicit = explicit
413 414 self.algo = algo
414 415 self.calculate_super_admin_as_user = calculate_super_admin_as_user
415 416
416 417 scope = scope or {}
417 418 self.scope_repo_id = scope.get('repo_id')
418 419 self.scope_repo_group_id = scope.get('repo_group_id')
419 420 self.scope_user_group_id = scope.get('user_group_id')
420 421
421 422 self.default_user_id = User.get_default_user(cache=True).user_id
422 423
423 424 self.permissions_repositories = PermOriginDict()
424 425 self.permissions_repository_groups = PermOriginDict()
425 426 self.permissions_user_groups = PermOriginDict()
426 427 self.permissions_repository_branches = BranchPermOriginDict()
427 428 self.permissions_global = set()
428 429
429 430 self.default_repo_perms = Permission.get_default_repo_perms(
430 431 self.default_user_id, self.scope_repo_id)
431 432 self.default_repo_groups_perms = Permission.get_default_group_perms(
432 433 self.default_user_id, self.scope_repo_group_id)
433 434 self.default_user_group_perms = \
434 435 Permission.get_default_user_group_perms(
435 436 self.default_user_id, self.scope_user_group_id)
436 437
437 438 # default branch perms
438 439 self.default_branch_repo_perms = \
439 440 Permission.get_default_repo_branch_perms(
440 441 self.default_user_id, self.scope_repo_id)
441 442
442 443 def calculate(self):
443 444 if self.user_is_admin and not self.calculate_super_admin_as_user:
444 445 return self._calculate_admin_permissions()
445 446
446 447 self._calculate_global_default_permissions()
447 448 self._calculate_global_permissions()
448 449 self._calculate_default_permissions()
449 450 self._calculate_repository_permissions()
450 451 self._calculate_repository_branch_permissions()
451 452 self._calculate_repository_group_permissions()
452 453 self._calculate_user_group_permissions()
453 454 return self._permission_structure()
454 455
455 456 def _calculate_admin_permissions(self):
456 457 """
457 458 admin user have all default rights for repositories
458 459 and groups set to admin
459 460 """
460 461 self.permissions_global.add('hg.admin')
461 462 self.permissions_global.add('hg.create.write_on_repogroup.true')
462 463
463 464 # repositories
464 465 for perm in self.default_repo_perms:
465 466 r_k = perm.UserRepoToPerm.repository.repo_name
467 archived = perm.UserRepoToPerm.repository.archived
466 468 p = 'repository.admin'
467 469 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN
470 # special case for archived repositories, which we block still even for
471 # super admins
472 if archived:
473 p = 'repository.read'
474 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED
468 475
469 476 # repository groups
470 477 for perm in self.default_repo_groups_perms:
471 478 rg_k = perm.UserRepoGroupToPerm.group.group_name
472 479 p = 'group.admin'
473 480 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN
474 481
475 482 # user groups
476 483 for perm in self.default_user_group_perms:
477 484 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
478 485 p = 'usergroup.admin'
479 486 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN
480 487
481 488 # branch permissions
482 489 # since super-admin also can have custom rule permissions
483 490 # we *always* need to calculate those inherited from default, and also explicit
484 491 self._calculate_default_permissions_repository_branches(
485 492 user_inherit_object_permissions=False)
486 493 self._calculate_repository_branch_permissions()
487 494
488 495 return self._permission_structure()
489 496
490 497 def _calculate_global_default_permissions(self):
491 498 """
492 499 global permissions taken from the default user
493 500 """
494 501 default_global_perms = UserToPerm.query()\
495 502 .filter(UserToPerm.user_id == self.default_user_id)\
496 503 .options(joinedload(UserToPerm.permission))
497 504
498 505 for perm in default_global_perms:
499 506 self.permissions_global.add(perm.permission.permission_name)
500 507
501 508 if self.user_is_admin:
502 509 self.permissions_global.add('hg.admin')
503 510 self.permissions_global.add('hg.create.write_on_repogroup.true')
504 511
505 512 def _calculate_global_permissions(self):
506 513 """
507 514 Set global system permissions with user permissions or permissions
508 515 taken from the user groups of the current user.
509 516
510 517 The permissions include repo creating, repo group creating, forking
511 518 etc.
512 519 """
513 520
514 521 # now we read the defined permissions and overwrite what we have set
515 522 # before those can be configured from groups or users explicitly.
516 523
517 524 # In case we want to extend this list we should make sure
518 525 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
519 526 _configurable = frozenset([
520 527 'hg.fork.none', 'hg.fork.repository',
521 528 'hg.create.none', 'hg.create.repository',
522 529 'hg.usergroup.create.false', 'hg.usergroup.create.true',
523 530 'hg.repogroup.create.false', 'hg.repogroup.create.true',
524 531 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
525 532 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
526 533 ])
527 534
528 535 # USER GROUPS comes first user group global permissions
529 536 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
530 537 .options(joinedload(UserGroupToPerm.permission))\
531 538 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
532 539 UserGroupMember.users_group_id))\
533 540 .filter(UserGroupMember.user_id == self.user_id)\
534 541 .order_by(UserGroupToPerm.users_group_id)\
535 542 .all()
536 543
537 544 # need to group here by groups since user can be in more than
538 545 # one group, so we get all groups
539 546 _explicit_grouped_perms = [
540 547 [x, list(y)] for x, y in
541 548 itertools.groupby(user_perms_from_users_groups,
542 549 lambda _x: _x.users_group)]
543 550
544 551 for gr, perms in _explicit_grouped_perms:
545 552 # since user can be in multiple groups iterate over them and
546 553 # select the lowest permissions first (more explicit)
547 554 # TODO(marcink): do this^^
548 555
549 556 # group doesn't inherit default permissions so we actually set them
550 557 if not gr.inherit_default_permissions:
551 558 # NEED TO IGNORE all previously set configurable permissions
552 559 # and replace them with explicitly set from this user
553 560 # group permissions
554 561 self.permissions_global = self.permissions_global.difference(
555 562 _configurable)
556 563 for perm in perms:
557 564 self.permissions_global.add(perm.permission.permission_name)
558 565
559 566 # user explicit global permissions
560 567 user_perms = Session().query(UserToPerm)\
561 568 .options(joinedload(UserToPerm.permission))\
562 569 .filter(UserToPerm.user_id == self.user_id).all()
563 570
564 571 if not self.inherit_default_permissions:
565 572 # NEED TO IGNORE all configurable permissions and
566 573 # replace them with explicitly set from this user permissions
567 574 self.permissions_global = self.permissions_global.difference(
568 575 _configurable)
569 576 for perm in user_perms:
570 577 self.permissions_global.add(perm.permission.permission_name)
571 578
572 579 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
573 580 for perm in self.default_repo_perms:
574 581 r_k = perm.UserRepoToPerm.repository.repo_name
582 archived = perm.UserRepoToPerm.repository.archived
575 583 p = perm.Permission.permission_name
576 584 o = PermOrigin.REPO_DEFAULT
577 585 self.permissions_repositories[r_k] = p, o
578 586
579 587 # if we decide this user isn't inheriting permissions from
580 588 # default user we set him to .none so only explicit
581 589 # permissions work
582 590 if not user_inherit_object_permissions:
583 591 p = 'repository.none'
584 592 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
585 593 self.permissions_repositories[r_k] = p, o
586 594
587 595 if perm.Repository.private and not (
588 596 perm.Repository.user_id == self.user_id):
589 597 # disable defaults for private repos,
590 598 p = 'repository.none'
591 599 o = PermOrigin.REPO_PRIVATE
592 600 self.permissions_repositories[r_k] = p, o
593 601
594 602 elif perm.Repository.user_id == self.user_id:
595 603 # set admin if owner
596 604 p = 'repository.admin'
597 605 o = PermOrigin.REPO_OWNER
598 606 self.permissions_repositories[r_k] = p, o
599 607
600 608 if self.user_is_admin:
601 609 p = 'repository.admin'
602 610 o = PermOrigin.SUPER_ADMIN
603 611 self.permissions_repositories[r_k] = p, o
604 612
613 # finally in case of archived repositories, we downgrade higher
614 # permissions to read
615 if archived:
616 current_perm = self.permissions_repositories[r_k]
617 if current_perm in ['repository.write', 'repository.admin']:
618 p = 'repository.read'
619 o = PermOrigin.ARCHIVED
620 self.permissions_repositories[r_k] = p, o
621
605 622 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
606 623 for perm in self.default_branch_repo_perms:
607 624
608 625 r_k = perm.UserRepoToPerm.repository.repo_name
609 626 p = perm.Permission.permission_name
610 627 pattern = perm.UserToRepoBranchPermission.branch_pattern
611 628 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
612 629
613 630 if not self.explicit:
614 631 # TODO(marcink): fix this for multiple entries
615 632 cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
616 633 p = self._choose_permission(p, cur_perm)
617 634
618 635 # NOTE(marcink): register all pattern/perm instances in this
619 636 # special dict that aggregates entries
620 637 self.permissions_repository_branches[r_k] = pattern, p, o
621 638
622 639 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
623 640 for perm in self.default_repo_groups_perms:
624 641 rg_k = perm.UserRepoGroupToPerm.group.group_name
625 642 p = perm.Permission.permission_name
626 643 o = PermOrigin.REPOGROUP_DEFAULT
627 644 self.permissions_repository_groups[rg_k] = p, o
628 645
629 646 # if we decide this user isn't inheriting permissions from default
630 647 # user we set him to .none so only explicit permissions work
631 648 if not user_inherit_object_permissions:
632 649 p = 'group.none'
633 650 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
634 651 self.permissions_repository_groups[rg_k] = p, o
635 652
636 653 if perm.RepoGroup.user_id == self.user_id:
637 654 # set admin if owner
638 655 p = 'group.admin'
639 656 o = PermOrigin.REPOGROUP_OWNER
640 657 self.permissions_repository_groups[rg_k] = p, o
641 658
642 659 if self.user_is_admin:
643 660 p = 'group.admin'
644 661 o = PermOrigin.SUPER_ADMIN
645 662 self.permissions_repository_groups[rg_k] = p, o
646 663
647 664 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
648 665 for perm in self.default_user_group_perms:
649 666 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
650 667 p = perm.Permission.permission_name
651 668 o = PermOrigin.USERGROUP_DEFAULT
652 669 self.permissions_user_groups[u_k] = p, o
653 670
654 671 # if we decide this user isn't inheriting permissions from default
655 672 # user we set him to .none so only explicit permissions work
656 673 if not user_inherit_object_permissions:
657 674 p = 'usergroup.none'
658 675 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
659 676 self.permissions_user_groups[u_k] = p, o
660 677
661 678 if perm.UserGroup.user_id == self.user_id:
662 679 # set admin if owner
663 680 p = 'usergroup.admin'
664 681 o = PermOrigin.USERGROUP_OWNER
665 682 self.permissions_user_groups[u_k] = p, o
666 683
667 684 if self.user_is_admin:
668 685 p = 'usergroup.admin'
669 686 o = PermOrigin.SUPER_ADMIN
670 687 self.permissions_user_groups[u_k] = p, o
671 688
672 689 def _calculate_default_permissions(self):
673 690 """
674 691 Set default user permissions for repositories, repository branches,
675 692 repository groups, user groups taken from the default user.
676 693
677 694 Calculate inheritance of object permissions based on what we have now
678 695 in GLOBAL permissions. We check if .false is in GLOBAL since this is
679 696 explicitly set. Inherit is the opposite of .false being there.
680 697
681 698 .. note::
682 699
683 700 the syntax is little bit odd but what we need to check here is
684 701 the opposite of .false permission being in the list so even for
685 702 inconsistent state when both .true/.false is there
686 703 .false is more important
687 704
688 705 """
689 706 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
690 707 in self.permissions_global)
691 708
692 709 # default permissions inherited from `default` user permissions
693 710 self._calculate_default_permissions_repositories(
694 711 user_inherit_object_permissions)
695 712
696 713 self._calculate_default_permissions_repository_branches(
697 714 user_inherit_object_permissions)
698 715
699 716 self._calculate_default_permissions_repository_groups(
700 717 user_inherit_object_permissions)
701 718
702 719 self._calculate_default_permissions_user_groups(
703 720 user_inherit_object_permissions)
704 721
705 722 def _calculate_repository_permissions(self):
706 723 """
707 724 Repository permissions for the current user.
708 725
709 726 Check if the user is part of user groups for this repository and
710 727 fill in the permission from it. `_choose_permission` decides of which
711 728 permission should be selected based on selected method.
712 729 """
713 730
714 731 # user group for repositories permissions
715 732 user_repo_perms_from_user_group = Permission\
716 733 .get_default_repo_perms_from_user_group(
717 734 self.user_id, self.scope_repo_id)
718 735
719 736 multiple_counter = collections.defaultdict(int)
720 737 for perm in user_repo_perms_from_user_group:
721 738 r_k = perm.UserGroupRepoToPerm.repository.repo_name
722 739 multiple_counter[r_k] += 1
723 740 p = perm.Permission.permission_name
724 741 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
725 742 .users_group.users_group_name
726 743
727 744 if multiple_counter[r_k] > 1:
728 745 cur_perm = self.permissions_repositories[r_k]
729 746 p = self._choose_permission(p, cur_perm)
730 747
731 748 self.permissions_repositories[r_k] = p, o
732 749
733 750 if perm.Repository.user_id == self.user_id:
734 751 # set admin if owner
735 752 p = 'repository.admin'
736 753 o = PermOrigin.REPO_OWNER
737 754 self.permissions_repositories[r_k] = p, o
738 755
739 756 if self.user_is_admin:
740 757 p = 'repository.admin'
741 758 o = PermOrigin.SUPER_ADMIN
742 759 self.permissions_repositories[r_k] = p, o
743 760
744 761 # user explicit permissions for repositories, overrides any specified
745 762 # by the group permission
746 763 user_repo_perms = Permission.get_default_repo_perms(
747 764 self.user_id, self.scope_repo_id)
748 765 for perm in user_repo_perms:
749 766 r_k = perm.UserRepoToPerm.repository.repo_name
750 767 p = perm.Permission.permission_name
751 768 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
752 769
753 770 if not self.explicit:
754 771 cur_perm = self.permissions_repositories.get(
755 772 r_k, 'repository.none')
756 773 p = self._choose_permission(p, cur_perm)
757 774
758 775 self.permissions_repositories[r_k] = p, o
759 776
760 777 if perm.Repository.user_id == self.user_id:
761 778 # set admin if owner
762 779 p = 'repository.admin'
763 780 o = PermOrigin.REPO_OWNER
764 781 self.permissions_repositories[r_k] = p, o
765 782
766 783 if self.user_is_admin:
767 784 p = 'repository.admin'
768 785 o = PermOrigin.SUPER_ADMIN
769 786 self.permissions_repositories[r_k] = p, o
770 787
771 788 def _calculate_repository_branch_permissions(self):
772 789 # user group for repositories permissions
773 790 user_repo_branch_perms_from_user_group = Permission\
774 791 .get_default_repo_branch_perms_from_user_group(
775 792 self.user_id, self.scope_repo_id)
776 793
777 794 multiple_counter = collections.defaultdict(int)
778 795 for perm in user_repo_branch_perms_from_user_group:
779 796 r_k = perm.UserGroupRepoToPerm.repository.repo_name
780 797 p = perm.Permission.permission_name
781 798 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
782 799 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
783 800 .users_group.users_group_name
784 801
785 802 multiple_counter[r_k] += 1
786 803 if multiple_counter[r_k] > 1:
787 804 # TODO(marcink): fix this for multi branch support, and multiple entries
788 805 cur_perm = self.permissions_repository_branches[r_k]
789 806 p = self._choose_permission(p, cur_perm)
790 807
791 808 self.permissions_repository_branches[r_k] = pattern, p, o
792 809
793 810 # user explicit branch permissions for repositories, overrides
794 811 # any specified by the group permission
795 812 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
796 813 self.user_id, self.scope_repo_id)
797 814
798 815 for perm in user_repo_branch_perms:
799 816
800 817 r_k = perm.UserRepoToPerm.repository.repo_name
801 818 p = perm.Permission.permission_name
802 819 pattern = perm.UserToRepoBranchPermission.branch_pattern
803 820 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
804 821
805 822 if not self.explicit:
806 823 # TODO(marcink): fix this for multiple entries
807 824 cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
808 825 p = self._choose_permission(p, cur_perm)
809 826
810 827 # NOTE(marcink): register all pattern/perm instances in this
811 828 # special dict that aggregates entries
812 829 self.permissions_repository_branches[r_k] = pattern, p, o
813 830
    def _calculate_repository_group_permissions(self):
        """
        Repository group permissions for the current user.

        Check if the user is part of user groups for repository groups and
        fill in the permissions from it. `_choose_permission` decides of which
        permission should be selected based on selected method.

        Owner and super-admin checks run last so they always win over any
        group/explicit grant.
        """
        # user group for repo groups permissions
        user_repo_group_perms_from_user_group = Permission\
            .get_default_group_perms_from_user_group(
                self.user_id, self.scope_repo_group_id)

        # tracks repeated grants for the same repo group from multiple
        # user groups; conflicts resolved via _choose_permission
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_group_perms_from_user_group:
            rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
            multiple_counter[rg_k] += 1
            o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
                .users_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[rg_k] > 1:
                cur_perm = self.permissions_repository_groups[rg_k]
                p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[rg_k] = p, o

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o

        # user explicit permissions for repository groups
        user_repo_groups_perms = Permission.get_default_group_perms(
            self.user_id, self.scope_repo_group_id)
        for perm in user_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                # merge the explicit grant with the group-derived one
                cur_perm = self.permissions_repository_groups.get(
                    rg_k, 'group.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_groups[rg_k] = p, o

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o
877 894
    def _calculate_user_group_permissions(self):
        """
        User group permissions for the current user.

        Mirrors `_calculate_repository_group_permissions`: first grants
        obtained through other user groups, then the user's explicit grants;
        owner and super-admin checks always take precedence.
        """
        # user group for user group permissions
        user_group_from_user_group = Permission\
            .get_default_user_group_perms_from_user_group(
                self.user_id, self.scope_user_group_id)

        # conflict counter for targets granted by multiple user groups
        multiple_counter = collections.defaultdict(int)
        for perm in user_group_from_user_group:
            ug_k = perm.UserGroupUserGroupToPerm\
                .target_user_group.users_group_name
            multiple_counter[ug_k] += 1
            o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
                .user_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[ug_k] > 1:
                cur_perm = self.permissions_user_groups[ug_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o

        # user explicit permission for user groups
        user_user_groups_perms = Permission.get_default_user_group_perms(
            self.user_id, self.scope_user_group_id)
        for perm in user_user_groups_perms:
            ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
            o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                # merge the explicit grant with the group-derived one
                cur_perm = self.permissions_user_groups.get(
                    ug_k, 'usergroup.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o
939 956
940 957 def _choose_permission(self, new_perm, cur_perm):
941 958 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
942 959 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
943 960 if self.algo == 'higherwin':
944 961 if new_perm_val > cur_perm_val:
945 962 return new_perm
946 963 return cur_perm
947 964 elif self.algo == 'lowerwin':
948 965 if new_perm_val < cur_perm_val:
949 966 return new_perm
950 967 return cur_perm
951 968
952 969 def _permission_structure(self):
953 970 return {
954 971 'global': self.permissions_global,
955 972 'repositories': self.permissions_repositories,
956 973 'repository_branches': self.permissions_repository_branches,
957 974 'repositories_groups': self.permissions_repository_groups,
958 975 'user_groups': self.permissions_user_groups,
959 976 }
960 977
961 978
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Check if given controller_name is in whitelist of auth token access

    :param view_name: fully qualified view name, e.g. ``GistView:list``
    :param auth_token: the token supplied with the request (may be empty)
    :param whitelist: optional explicit whitelist; when not given it is read
        from the ``api_access_controllers_whitelist`` setting
    :return: True when the view may be accessed with an auth token
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')

    # backward compat translation
    compat = {
        # old controller, new VIEW
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    matched = False
    for pattern in whitelist:
        # translate from old Controllers to Pyramid Views
        pattern = compat.get(pattern, pattern)

        required_token = None
        if '@' in pattern:
            # specific AuthToken
            pattern, required_token = pattern.split('@', 1)

        if fnmatch.fnmatch(view_name, pattern):
            if required_token is None or auth_token == required_token:
                matched = True
                break

    if matched:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return matched
1014 1031
1015 1032
class AuthUser(object):
    """
    A simple object that handles all attributes of user in RhodeCode

    It does lookup based on API key, given user, or user present in session
    Then it fills all required information for such user. It also checks if
    anonymous access is enabled and if so, it returns default user as logged in
    """
    # all global permission names known to the system
    GLOBAL_PERMS = [x[0] for x in Permission.PERMS]

    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):

        # identifiers used for the lookup (in priority order, see
        # propagate_data): user_id, api_key, username
        self.user_id = user_id
        self._api_key = api_key

        # fields below are filled in by UserModel.fill_data during
        # propagate_data
        self.api_key = None
        self.username = username
        self.ip_addr = ip_addr
        self.name = ''
        self.lastname = ''
        self.first_name = ''
        self.last_name = ''
        self.email = ''
        self.is_authenticated = False
        self.admin = False
        self.inherit_default_permissions = False
        self.password = ''

        self.anonymous_user = None # propagated on propagate_data
        self.propagate_data()
        self._instance = None
        self._permissions_scoped_cache = {} # used to bind scoped calculation

    @LazyProperty
    def permissions(self):
        """Unscoped permission tree for this user (lazy, cached)."""
        return self.get_perms(user=self, cache=None)

    @LazyProperty
    def permissions_safe(self):
        """
        Filtered permissions excluding not allowed repositories
        """
        perms = self.get_perms(user=self, cache=None)

        perms['repositories'] = {
            k: v for k, v in perms['repositories'].items()
            if v != 'repository.none'}
        perms['repositories_groups'] = {
            k: v for k, v in perms['repositories_groups'].items()
            if v != 'group.none'}
        perms['user_groups'] = {
            k: v for k, v in perms['user_groups'].items()
            if v != 'usergroup.none'}
        perms['repository_branches'] = {
            k: v for k, v in perms['repository_branches'].iteritems()
            if v != 'branch.none'}
        return perms

    @LazyProperty
    def permissions_full_details(self):
        """Permission tree computed the slow way, even for super-admins."""
        return self.get_perms(
            user=self, cache=None, calculate_super_admin=True)

    def permissions_with_scope(self, scope):
        """
        Call the get_perms function with scoped data. The scope in that function
        narrows the SQL calls to the given ID of objects resulting in fetching
        Just particular permission we want to obtain. If scope is an empty dict
        then it basically narrows the scope to GLOBAL permissions only.

        :param scope: dict
        """
        if 'repo_name' in scope:
            obj = Repository.get_by_repo_name(scope['repo_name'])
            if obj:
                scope['repo_id'] = obj.repo_id
        # fixed key order so that identical scopes produce identical
        # cache keys below
        _scope = collections.OrderedDict()
        _scope['repo_id'] = -1
        _scope['user_group_id'] = -1
        _scope['repo_group_id'] = -1

        for k in sorted(scope.keys()):
            _scope[k] = scope[k]

        # store in cache to mimic how the @LazyProperty works,
        # the difference here is that we use the unique key calculated
        # from params and values
        return self.get_perms(user=self, cache=None, scope=_scope)

    def get_instance(self):
        """Return the DB `User` row backing this AuthUser."""
        return User.get(self.user_id)

    def propagate_data(self):
        """
        Fills in user data and propagates values to this instance. Maps fetched
        user attributes to this class instance attributes
        """
        log.debug('AuthUser: starting data propagation for new potential user')
        user_model = UserModel()
        anon_user = self.anonymous_user = User.get_default_user(cache=True)
        is_user_loaded = False

        # lookup by userid
        if self.user_id is not None and self.user_id != anon_user.user_id:
            log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)

        # try go get user by api key
        elif self._api_key and self._api_key != anon_user.api_key:
            log.debug('Trying Auth User lookup by API KEY: `%s`', self._api_key)
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)

        # lookup by username
        elif self.username:
            log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
            is_user_loaded = user_model.fill_data(self, username=self.username)
        else:
            log.debug('No data in %s that could been used to log in', self)

        if not is_user_loaded:
            log.debug(
                'Failed to load user. Fallback to default user %s', anon_user)
            # if we cannot authenticate user try anonymous
            if anon_user.active:
                log.debug('default user is active, using it as a session user')
                user_model.fill_data(self, user_id=anon_user.user_id)
                # then we set this user is logged in
                self.is_authenticated = True
            else:
                log.debug('default user is NOT active')
                # in case of disabled anonymous user we reset some of the
                # parameters so such user is "corrupted", skipping the fill_data
                for attr in ['user_id', 'username', 'admin', 'active']:
                    setattr(self, attr, None)
                self.is_authenticated = False

        if not self.username:
            self.username = 'None'

        log.debug('AuthUser: propagated user is now %s', self)

    def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
                  calculate_super_admin=False, cache=None):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: instance of User object from database
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will define if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be choose if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        :param calculate_super_admin: calculate permissions for super-admin in the
            same way as for regular user without speedups
        :param cache: Use caching for calculation, None = let the cache backend decide
        """
        user_id = user.user_id
        user_is_admin = user.is_admin

        # inheritance of global permissions like create repo/fork repo etc
        user_inherit_default_permissions = user.inherit_default_permissions

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))

        if cache is None:
            # let the backend cache decide
            cache_on = cache_seconds > 0
        else:
            cache_on = cache

        log.debug(
            'Computing PERMISSION tree for user %s scope `%s` '
            'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)

        cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=cache_on)
        def compute_perm_tree(cache_name,
                user_id, scope, user_is_admin,user_inherit_default_permissions,
                explicit, algo, calculate_super_admin):
            return _cached_perms_data(
                user_id, scope, user_is_admin, user_inherit_default_permissions,
                explicit, algo, calculate_super_admin)

        start = time.time()
        result = compute_perm_tree(
            'permissions', user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin)

        # build a compact (section, entry-count) summary for the debug log
        result_repr = []
        for k in result:
            result_repr.append((k, len(result[k])))
        total = time.time() - start
        log.debug('PERMISSION tree for user %s computed in %.3fs: %s',
                  user, total, result_repr)

        return result

    @property
    def is_default(self):
        """True when this is the special anonymous/default user."""
        return self.username == User.DEFAULT_USER

    @property
    def is_admin(self):
        return self.admin

    @property
    def is_user_object(self):
        """True when this AuthUser is backed by a real user id."""
        return self.user_id is not None

    @property
    def repositories_admin(self):
        """
        Returns list of repositories you're an admin of
        """
        return [
            x[0] for x in self.permissions['repositories'].items()
            if x[1] == 'repository.admin']

    @property
    def repository_groups_admin(self):
        """
        Returns list of repository groups you're an admin of
        """
        return [
            x[0] for x in self.permissions['repositories_groups'].items()
            if x[1] == 'group.admin']

    @property
    def user_groups_admin(self):
        """
        Returns list of user groups you're an admin of
        """
        return [
            x[0] for x in self.permissions['user_groups'].items()
            if x[1] == 'usergroup.admin']

    def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoList
        if not perms:
            perms = [
                'repository.read', 'repository.write', 'repository.admin']

        def _cached_repo_acl(user_id, perm_def, _name_filter):
            qry = Repository.query()
            if _name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
                qry = qry.filter(
                    Repository.repo_name.ilike(ilike_expression))

            return [x.repo_id for x in
                    RepoList(qry, perm_set=perm_def)]

        return _cached_repo_acl(self.user_id, perms, name_filter)

    def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoGroupList
        if not perms:
            perms = [
                'group.read', 'group.write', 'group.admin']

        def _cached_repo_group_acl(user_id, perm_def, _name_filter):
            qry = RepoGroup.query()
            if _name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
                qry = qry.filter(
                    RepoGroup.group_name.ilike(ilike_expression))

            return [x.group_id for x in
                    RepoGroupList(qry, perm_set=perm_def)]

        return _cached_repo_group_acl(self.user_id, perms, name_filter)

    def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of user group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import UserGroupList
        if not perms:
            perms = [
                'usergroup.read', 'usergroup.write', 'usergroup.admin']

        def _cached_user_group_acl(user_id, perm_def, name_filter):
            qry = UserGroup.query()
            if name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(name_filter))
                qry = qry.filter(
                    UserGroup.users_group_name.ilike(ilike_expression))

            return [x.users_group_id for x in
                    UserGroupList(qry, perm_set=perm_def)]

        return _cached_user_group_acl(self.user_id, perms, name_filter)

    @property
    def ip_allowed(self):
        """
        Checks if ip_addr used in constructor is allowed from defined list of
        allowed ip_addresses for user

        :returns: boolean, True if ip is in allowed ip range
        """
        # check IP
        inherit = self.inherit_default_permissions
        return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
                                         inherit_from_default=inherit)
    @property
    def personal_repo_group(self):
        """The user's personal repository group, if one exists."""
        return RepoGroup.get_user_personal_repo_group(self.user_id)

    @LazyProperty
    def feed_token(self):
        """RSS/feed access token taken from the backing DB user."""
        return self.get_instance().feed_token

    @classmethod
    def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
        """Return True when `ip_addr` falls within the user's allowed ranges."""
        allowed_ips = AuthUser.get_allowed_ips(
            user_id, cache=True, inherit_from_default=inherit_from_default)
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
            log.debug('IP:%s for user %s is in range of %s',
                      ip_addr, user_id, allowed_ips)
            return True
        else:
            log.info('Access for IP:%s forbidden for user %s, '
                     'not in %s', ip_addr, user_id, allowed_ips)
            return False

    def get_branch_permissions(self, repo_name, perms=None):
        """
        Return the branch-pattern -> permission mapping for `repo_name`,
        or an empty dict when no branch rules apply.
        """
        perms = perms or self.permissions_with_scope({'repo_name': repo_name})
        branch_perms = perms.get('repository_branches', {})
        if not branch_perms:
            return {}
        repo_branch_perms = branch_perms.get(repo_name)
        return repo_branch_perms or {}

    def get_rule_and_branch_permission(self, repo_name, branch_name):
        """
        Check if this AuthUser has defined any permissions for branches. If any of
        the rules match in order, we return the matching permissions
        """

        rule = default_perm = ''

        repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
        if not repo_branch_perms:
            return rule, default_perm

        # now calculate the permissions
        for pattern, branch_perm in repo_branch_perms.items():
            if fnmatch.fnmatch(branch_name, pattern):
                rule = '`{}`=>{}'.format(pattern, branch_perm)
                return rule, branch_perm

        return rule, default_perm

    def __repr__(self):
        return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
            % (self.user_id, self.username, self.ip_addr, self.is_authenticated)

    def set_authenticated(self, authenticated=True):
        """Mark this user authenticated (never applies to the anonymous user)."""
        if self.user_id != self.anonymous_user.user_id:
            self.is_authenticated = authenticated

    def get_cookie_store(self):
        """Return the minimal session-cookie payload for this user."""
        return {
            'username': self.username,
            'password': md5(self.password or ''),
            'user_id': self.user_id,
            'is_authenticated': self.is_authenticated
        }

    @classmethod
    def from_cookie_store(cls, cookie_store):
        """
        Creates AuthUser from a cookie store

        :param cls:
        :param cookie_store:
        """
        user_id = cookie_store.get('user_id')
        username = cookie_store.get('username')
        api_key = cookie_store.get('api_key')
        return AuthUser(user_id, api_key, username)

    @classmethod
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
        """
        Return the set of allowed IP ranges for `user_id`, optionally merged
        with the default user's ranges; falls back to allow-all when empty.
        """
        _set = set()

        if inherit_from_default:
            def_user_id = User.get_default_user(cache=True).user_id
            default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
            if cache:
                default_ips = default_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_default"))

            # populate from default user
            for ip in default_ips:
                try:
                    _set.add(ip.ip_addr)
                except ObjectDeletedError:
                    # since we use heavy caching sometimes it happens that
                    # we get deleted objects here, we just skip them
                    pass

        # NOTE:(marcink) we don't want to load any rules for empty
        # user_id which is the case of access of non logged users when anonymous
        # access is disabled
        user_ips = []
        if user_id:
            user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
            if cache:
                user_ips = user_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_%s" % user_id))

        for ip in user_ips:
            try:
                _set.add(ip.ip_addr)
            except ObjectDeletedError:
                # since we use heavy caching sometimes it happens that we get
                # deleted objects here, we just skip them
                pass
        return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1459 1476
1460 1477
def set_available_permissions(settings):
    """
    This function will propagate pyramid settings with all available defined
    permission given in db. We don't want to check each time from db for new
    permissions since adding a new permission also requires application restart
    ie. to decorate new views with the newly created permission

    :param settings: current pyramid registry.settings

    """
    log.debug('auth: getting information about all available permissions')
    try:
        db_session = meta.Session
        permission_rows = db_session.query(Permission).all()
        permission_names = [row.permission_name for row in permission_rows]
        settings.setdefault('available_permissions', permission_names)
        log.debug('auth: set available permissions')
    except Exception:
        log.exception('Failed to fetch permissions from the database.')
        raise
1481 1498
1482 1499
def get_csrf_token(session, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pyramid session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token
    import os

    if (csrf_token_key not in session and save_if_missing) or force_new:
        # SECURITY FIX: derive the token from a cryptographically secure
        # source. The previous implementation hashed `random.getrandbits(128)`
        # (Mersenne Twister), whose output is predictable and unsuitable for
        # CSRF tokens. Token format (40-char sha1 hexdigest) is unchanged.
        token = hashlib.sha1(os.urandom(32)).hexdigest()
        session[csrf_token_key] = token
        if hasattr(session, 'save'):
            # beaker-style sessions need an explicit save
            session.save()
    return session.get(csrf_token_key)
1502 1519
1503 1520
def get_request(perm_class_instance):
    """
    Return the current thread-local pyramid request.

    `perm_class_instance` is accepted for call-site symmetry with the
    permission decorators but does not influence the lookup.
    """
    from pyramid.threadlocal import get_current_request
    return get_current_request()
1508 1525
1509 1526
1510 1527 # CHECK DECORATORS
class CSRFRequired(object):
    """
    Decorator for authenticating a form

    This decorator uses an authorization token stored in the client's
    session for prevention of certain Cross-site request forgery (CSRF)
    attacks (See
    http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
    information).

    For use with the ``webhelpers.secure_form`` helper functions.

    :param token: name of the POST field carrying the CSRF token
    :param header: HTTP header consulted when the POST field is absent
    :param except_methods: iterable of HTTP methods exempt from the check
    """
    def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
                 except_methods=None):
        self.token = token
        self.header = header
        self.except_methods = except_methods or []

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_csrf(self, _request):
        # POST field takes precedence over the header variant
        return _request.POST.get(self.token, _request.headers.get(self.header))

    def check_csrf(self, _request, cur_token):
        """Return truthy when the request supplies a token equal to `cur_token`."""
        supplied_token = self._get_csrf(_request)
        return supplied_token and supplied_token == cur_token

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        request = self._get_request()

        if request.method in self.except_methods:
            # explicitly exempted method, skip CSRF validation
            return func(*fargs, **fkwargs)

        cur_token = get_csrf_token(request.session, save_if_missing=False)
        if self.check_csrf(request, cur_token):
            if request.POST.get(self.token):
                # consume the token so it doesn't leak into form handling
                del request.POST[self.token]
            return func(*fargs, **fkwargs)
        else:
            reason = 'token-missing'
            supplied_token = self._get_csrf(request)
            if supplied_token and cur_token != supplied_token:
                # BUG FIX: the original wrote `cur_token or ''[:6]`, which
                # slices the empty-string literal (a no-op) instead of the
                # token; parenthesize so the 6-char abbreviation applies to
                # the token values as intended.
                reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])

            csrf_message = \
                ("Cross-site request forgery detected, request denied. See "
                 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
                 "more information.")
            # `log.warn` is a deprecated alias of `log.warning`
            log.warning('Cross-site request forgery detected, request %r DENIED: %s '
                        'REMOTE_ADDR:%s, HEADERS:%s' % (
                            request, reason, request.remote_addr, request.headers))

            raise HTTPForbidden(explanation=csrf_message)
1570 1587
1571 1588
class LoginRequired(object):
    """
    Must be logged in to execute this function else
    redirect to login page

    :param auth_token_access: if enabled this checks only for valid auth token
        and grants access based on valid token
    """
    def __init__(self, auth_token_access=None):
        # optional list of token roles for which auth-token access is allowed
        # on the decorated view (falls back to the whitelist check otherwise)
        self.auth_token_access = auth_token_access

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        """
        Run the login/IP/auth-token checks; call the wrapped view on success,
        otherwise redirect to the login page (raises HTTPFound).
        """
        from rhodecode.lib import helpers as h
        cls = fargs[0]
        user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        # view location used both for logging and the token whitelist match
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
        log.debug('Starting login restriction checks for user: %s', user)
        # check if our IP is allowed
        ip_access_valid = True
        if not user.ip_allowed:
            h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
                    category='warning')
            ip_access_valid = False

        # check if we used an APIKEY and it's a valid one
        # defined white-list of controllers which API access will be enabled
        _auth_token = request.GET.get(
            'auth_token', '') or request.GET.get('api_key', '')
        auth_token_access_valid = allowed_auth_token_access(
            loc, auth_token=_auth_token)

        # explicit controller is enabled or API is in our whitelist
        if self.auth_token_access or auth_token_access_valid:
            log.debug('Checking AUTH TOKEN access for %s', cls)
            db_user = user.get_instance()

            if db_user:
                if self.auth_token_access:
                    roles = self.auth_token_access
                else:
                    roles = [UserApiKeys.ROLE_HTTP]
                token_match = db_user.authenticate_by_token(
                    _auth_token, roles=roles)
            else:
                log.debug('Unable to fetch db instance for auth user: %s', user)
                token_match = False

            if _auth_token and token_match:
                auth_token_access_valid = True
                # only the token suffix is logged, never the full secret
                log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
            else:
                auth_token_access_valid = False
                if not _auth_token:
                    log.debug("AUTH TOKEN *NOT* present in request")
                else:
                    log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])

        log.debug('Checking if %s is authenticated @ %s', user.username, loc)
        reason = 'RHODECODE_AUTH' if user.is_authenticated \
            else 'AUTH_TOKEN_AUTH'

        if ip_access_valid and (
                user.is_authenticated or auth_token_access_valid):
            log.info('user %s authenticating with:%s IS authenticated on func %s',
                     user, reason, loc)

            return func(*fargs, **fkwargs)
        else:
            log.warning(
                'user %s authenticating with:%s NOT authenticated on '
                'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
                user, reason, loc, ip_access_valid, auth_token_access_valid)
            # we preserve the get PARAM
            came_from = get_came_from(request)

            log.debug('redirecting to login page with %s', came_from)
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
1659 1676
1660 1677
class NotAnonymous(object):
    """
    Controller decorator that rejects the built-in anonymous user:
    registered users fall through to the wrapped function, everybody
    else is redirected to the login page.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        controller = fargs[0]
        self.user = controller._rhodecode_user
        request = self._get_request()
        _ = request.translate
        log.debug('Checking if user is not anonymous @%s', controller)

        # registered users pass straight through (guard clause)
        if self.user.username != User.DEFAULT_USER:
            return func(*fargs, **fkwargs)

        came_from = get_came_from(request)
        h.flash(_('You need to be a registered user to '
                  'perform this action'),
                category='warning')
        raise HTTPFound(
            h.route_path('login', _query={'came_from': came_from}))
1692 1709
1693 1710
class PermsDecorator(object):
    """
    Base class for controller permission decorators. The acting user is
    read from the controller instance (first positional argument), where
    base controllers store it as ``_rhodecode_user``.
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        controller = fargs[0]
        _user = controller._rhodecode_user
        request = self._get_request()
        _ = request.translate

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms,
                  controller, _user)

        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', controller, _user)
            return func(*fargs, **fkwargs)

        log.debug('Permission denied for %s %s', controller, _user)
        if _user.username == User.DEFAULT_USER:
            came_from = get_came_from(self._get_request())
            h.flash(_('You need to be signed in to view this page'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))

        # redirect with 404 to prevent resource discovery
        raise HTTPNotFound()

    def check_permissions(self, user):
        """Dummy function for overriding"""
        raise NotImplementedError(
            'You have to write this function in child class')
1742 1759
1743 1760
class HasPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when every one of the required global permissions
    is present for the user.
    """

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({})['global']
        return self.required_perms.issubset(global_perms)
1755 1772
1756 1773
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when at least one of the required global permissions
    is present for the user.
    """

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({})['global']
        return bool(self.required_perms.intersection(global_perms))
1768 1785
1769 1786
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when every required predicate holds for the
    repository extracted from the current request.
    """

    def _get_repo_name(self):
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            granted = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  granted, repo_name)
        return self.required_perms.issubset(granted)
1795 1812
1796 1813
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when at least one required predicate holds for the
    repository extracted from the current request.
    """

    def _get_repo_name(self):
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            granted = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug(
                'cannot locate repo with name: `%s` in permissions defs',
                repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  granted, repo_name)
        return bool(self.required_perms.intersection(granted))
1823 1840
1824 1841
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when every required predicate holds for the
    repository group extracted from the current request.
    """

    def _get_repo_group_name(self):
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            granted = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  granted, group_name)
        return self.required_perms.issubset(granted)
1851 1868
1852 1869
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when at least one required predicate holds for the
    repository group extracted from the current request.
    """

    def _get_repo_group_name(self):
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()

        try:
            granted = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  granted, group_name)
        return bool(self.required_perms.intersection(granted))
1880 1897
1881 1898
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when every required predicate holds for the user
    group extracted from the current request.
    """

    def _get_user_group_name(self):
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            granted = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return self.required_perms.issubset(granted)
1902 1919
1903 1920
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when at least one required predicate holds for the
    user group extracted from the current request.
    """

    def _get_user_group_name(self):
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            granted = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return bool(self.required_perms.intersection(granted))
1924 1941
1925 1942
1926 1943 # CHECK FUNCTIONS
class PermsFunction(object):
    """
    Base callable for permission check functions.

    Subclasses implement :meth:`check_permissions`; instances are called
    with an optional user and a ``check_location`` label used for logging.

    :param perms: permission names required by this checker
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)
        # scope attributes, filled in by subclasses' __call__ overrides
        self.repo_name = None
        self.repo_group_name = None
        self.user_group_name = None

    def __bool__(self):
        frame = inspect.currentframe()
        stack_trace = traceback.format_stack(frame)
        log.error('Checking bool value on a class instance of perm '
                  'function is not allowed: %s', ''.join(stack_trace))
        # rather than throwing errors, here we always return False so if by
        # accident someone checks truth for just an instance it will always end
        # up in returning False
        return False
    __nonzero__ = __bool__

    def __call__(self, check_location='', user=None):
        if not user:
            log.debug('Using user attribute from global request')
            request = self._get_request()
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            log.debug('Wrapping user %s into AuthUser', user)
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        check_scope = self._get_check_scope(cls_name)
        check_location = check_location or 'unspecified location'

        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope, check_location)
        if not user:
            log.warning('Empty user given for permission check')
            return False

        if self.check_permissions(user):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def _get_request(self):
        return get_request(self)

    def _get_check_scope(self, cls_name):
        # human-readable scope label for log messages, keyed by subclass name
        return {
            'HasPermissionAll': 'GLOBAL',
            'HasPermissionAny': 'GLOBAL',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
            'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
            'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
            'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
        }.get(cls_name, '?:%s' % cls_name)

    def check_permissions(self, user):
        """Dummy function for overriding"""
        # NotImplementedError for consistency with PermsDecorator; it is
        # still an Exception subclass, so broad handlers keep working
        raise NotImplementedError(
            'You have to write this function in child class')
1996 2013
1997 2014
class HasPermissionAll(PermsFunction):
    """Check that ALL of the required global permissions are granted."""

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({}).get('global')
        return self.required_perms.issubset(global_perms)
2004 2021
2005 2022
class HasPermissionAny(PermsFunction):
    """Check that ANY of the required global permissions is granted."""

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({}).get('global')
        return bool(self.required_perms.intersection(global_perms))
2012 2029
2013 2030
class HasRepoPermissionAll(PermsFunction):
    """Check that ALL required permissions are granted on one repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def _get_repo_name(self):
        # lazily resolve the repository slug from the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            granted = {perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2035 2052
2036 2053
class HasRepoPermissionAny(PermsFunction):
    """Check that ANY required permission is granted on one repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def _get_repo_name(self):
        # lazily resolve the repository slug from the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            granted = {perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2058 2075
2059 2076
class HasRepoGroupPermissionAny(PermsFunction):
    """Check that ANY required permission is granted on one repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2075 2092
2076 2093
class HasRepoGroupPermissionAll(PermsFunction):
    """Check that ALL required permissions are granted on one repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2092 2109
2093 2110
class HasUserGroupPermissionAny(PermsFunction):
    """Check that ANY required permission is granted on one user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2109 2126
2110 2127
class HasUserGroupPermissionAll(PermsFunction):
    """Check that ALL required permissions are granted on one user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2126 2143
2127 2144
2128 2145 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
class HasPermissionAnyMiddleware(object):
    """
    Permission check used by the VCS middleware: access is granted when
    at least one of the required permissions applies to the repository.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, auth_user, repo_name):
        # repo_name MUST be unicode, since we handle keys in permission
        # dict by unicode
        repo_name = safe_unicode(repo_name)
        log.debug(
            'Checking VCS protocol permissions %s for user:%s repo:`%s`',
            self.required_perms, auth_user, repo_name)

        granted = self.check_permissions(auth_user, repo_name)
        if granted:
            log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
        else:
            log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
        return granted

    def check_permissions(self, user, repo_name):
        perms = user.permissions_with_scope({'repo_name': repo_name})

        try:
            granted = {perms['repositories'][repo_name]}
        except Exception:
            log.exception('Error while accessing user permissions')
            return False

        return bool(self.required_perms.intersection(granted))
2163 2180
2164 2181
2165 2182 # SPECIAL VERSION TO HANDLE API AUTH
class _BaseApiPerm(object):
    """
    Base for API permission checkers; subclasses implement
    :meth:`check_permissions` against a plain permission-definition dict.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, check_location=None, user=None, repo_name=None,
                 group_name=None, user_group_name=None):
        cls_name = self.__class__.__name__
        # build a readable scope description for the log lines below
        scope_parts = ['global:%s' % (self.required_perms,)]
        if repo_name:
            scope_parts.append('repo_name:%s' % (repo_name,))
        if group_name:
            scope_parts.append('repo_group_name:%s' % (group_name,))
        if user_group_name:
            scope_parts.append('user_group_name:%s' % (user_group_name,))
        check_scope = ', '.join(scope_parts)

        log.debug('checking cls:%s %s %s @ %s',
                  cls_name, self.required_perms, check_scope, check_location)
        if not user:
            log.debug('Empty User passed into arguments')
            return False

        # process user
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)
        check_location = check_location or 'unspecified'

        granted = self.check_permissions(
            user.permissions, repo_name, group_name, user_group_name)
        if granted:
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
        return granted

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """
        implement in child class should return True if permissions are ok,
        False otherwise

        :param perm_defs: dict with permission definitions
        :param repo_name: repo name
        """
        raise NotImplementedError()
2215 2232
2216 2233
class HasPermissionAllApi(_BaseApiPerm):
    """API check: ALL required global permissions must be granted."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return self.required_perms.issubset(perm_defs.get('global'))
2223 2240
2224 2241
class HasPermissionAnyApi(_BaseApiPerm):
    """API check: ANY required global permission must be granted."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return bool(self.required_perms.intersection(perm_defs.get('global')))
2231 2248
2232 2249
class HasRepoPermissionAllApi(_BaseApiPerm):
    """API check: ALL required permissions must hold on the repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories'][repo_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(granted)
2244 2261
2245 2262
class HasRepoPermissionAnyApi(_BaseApiPerm):
    """API check: ANY required permission must hold on the repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories'][repo_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(granted))
2257 2274
2258 2275
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
    """API check: ANY required permission must hold on the repo group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(granted))
2270 2287
2271 2288
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
    """API check: ALL required permissions must hold on the repo group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(granted)
2283 2300
2284 2301
class HasUserGroupPermissionAnyApi(_BaseApiPerm):
    """API check: ANY required permission must hold on the user group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['user_groups'][user_group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(granted))
2296 2313
2297 2314
def check_ip_access(source_ip, allowed_ips=None):
    """
    Checks if source_ip is a subnet of any of allowed_ips.

    :param source_ip: address of the client to test
    :param allowed_ips: list of allowed ips together with mask
    """
    log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
    source_addr = ipaddress.ip_address(safe_unicode(source_ip))
    if not isinstance(allowed_ips, (tuple, list, set)):
        return False

    for candidate in allowed_ips:
        candidate = safe_unicode(candidate)
        try:
            network = ipaddress.ip_network(candidate, strict=False)
            if source_addr in network:
                log.debug('IP %s is network %s', source_addr, network)
                return True
        # for any case we cannot determine the IP, don't crash just
        # skip it and log as error, we want to say forbidden still when
        # sending bad IP
        except Exception:
            log.error(traceback.format_exc())
            continue
    return False
2322 2339
2323 2340
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def _decorated(*args, **kwargs):
        return wrapper(func, *args, **kwargs)
    # expose the wrapped callable the same way decorator.decorator() did
    _decorated.__wrapped__ = func
    return _decorated
2337 2354
2338 2355
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1053 +1,1072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 42 action_logger_generic)
43 43 from rhodecode.lib.vcs.backends import get_backend
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user_id = def_user.user_id
84 84
85 85 return repo_to_perm
86 86
87 87 @LazyProperty
88 88 def repos_path(self):
89 89 """
90 90 Gets the repositories root path from database
91 91 """
92 92 settings_model = VcsSettingsModel(sa=self.sa)
93 93 return settings_model.get_repos_location()
94 94
95 95 def get(self, repo_id):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_id == repo_id)
98 98
99 99 return repo.scalar()
100 100
101 101 def get_repo(self, repository):
102 102 return self._get_repo(repository)
103 103
104 104 def get_by_repo_name(self, repo_name, cache=False):
105 105 repo = self.sa.query(Repository) \
106 106 .filter(Repository.repo_name == repo_name)
107 107
108 108 if cache:
109 109 name_key = _hash_key(repo_name)
110 110 repo = repo.options(
111 111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
112 112 return repo.scalar()
113 113
114 114 def _extract_id_from_repo_name(self, repo_name):
115 115 if repo_name.startswith('/'):
116 116 repo_name = repo_name.lstrip('/')
117 117 by_id_match = re.match(r'^_(\d{1,})', repo_name)
118 118 if by_id_match:
119 119 return by_id_match.groups()[0]
120 120
121 121 def get_repo_by_id(self, repo_name):
122 122 """
123 123 Extracts repo_name by id from special urls.
124 124 Example url is _11/repo_name
125 125
126 126 :param repo_name:
127 127 :return: repo object if matched else None
128 128 """
129 129
130 130 try:
131 131 _repo_id = self._extract_id_from_repo_name(repo_name)
132 132 if _repo_id:
133 133 return self.get(_repo_id)
134 134 except Exception:
135 135 log.exception('Failed to extract repo_name from URL')
136 136
137 137 return None
138 138
139 139 def get_repos_for_root(self, root, traverse=False):
140 140 if traverse:
141 141 like_expression = u'{}%'.format(safe_unicode(root))
142 142 repos = Repository.query().filter(
143 143 Repository.repo_name.like(like_expression)).all()
144 144 else:
145 145 if root and not isinstance(root, RepoGroup):
146 146 raise ValueError(
147 147 'Root must be an instance '
148 148 'of RepoGroup, got:{} instead'.format(type(root)))
149 149 repos = Repository.query().filter(Repository.group == root).all()
150 150 return repos
151 151
152 152 def get_url(self, repo, request=None, permalink=False):
153 153 if not request:
154 154 request = get_current_request()
155 155
156 156 if not request:
157 157 return
158 158
159 159 if permalink:
160 160 return request.route_url(
161 161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 162 else:
163 163 return request.route_url(
164 164 'repo_summary', repo_name=safe_str(repo.repo_name))
165 165
166 166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 167 if not request:
168 168 request = get_current_request()
169 169
170 170 if not request:
171 171 return
172 172
173 173 if permalink:
174 174 return request.route_url(
175 175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 176 commit_id=commit_id)
177 177
178 178 else:
179 179 return request.route_url(
180 180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 181 commit_id=commit_id)
182 182
183 183 def get_repo_log(self, repo, filter_term):
184 184 repo_log = UserLog.query()\
185 185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 186 UserLog.repository_name == repo.repo_name))\
187 187 .options(joinedload(UserLog.user))\
188 188 .options(joinedload(UserLog.repository))\
189 189 .order_by(UserLog.action_date.desc())
190 190
191 191 repo_log = user_log_filter(repo_log, filter_term)
192 192 return repo_log
193 193
194 194 @classmethod
195 195 def update_repoinfo(cls, repositories=None):
196 196 if not repositories:
197 197 repositories = Repository.getAll()
198 198 for repo in repositories:
199 199 repo.update_commit_cache()
200 200
201 201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 202 super_user_actions=False):
203 203 _render = get_current_request().get_partial_renderer(
204 204 'rhodecode:templates/data_table/_dt_elements.mako')
205 205 c = _render.get_call_context()
206 206
207 207 def quick_menu(repo_name):
208 208 return _render('quick_menu', repo_name)
209 209
210 def repo_lnk(name, rtype, rstate, private, fork_of):
211 return _render('repo_name', name, rtype, rstate, private, fork_of,
210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
212 212 short_name=not admin, admin=False)
213 213
214 214 def last_change(last_change):
215 215 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
216 216 last_change = last_change + datetime.timedelta(seconds=
217 217 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
218 218 return _render("last_change", last_change)
219 219
220 220 def rss_lnk(repo_name):
221 221 return _render("rss", repo_name)
222 222
223 223 def atom_lnk(repo_name):
224 224 return _render("atom", repo_name)
225 225
226 226 def last_rev(repo_name, cs_cache):
227 227 return _render('revision', repo_name, cs_cache.get('revision'),
228 228 cs_cache.get('raw_id'), cs_cache.get('author'),
229 229 cs_cache.get('message'), cs_cache.get('date'))
230 230
231 231 def desc(desc):
232 232 return _render('repo_desc', desc, c.visual.stylify_metatags)
233 233
234 234 def state(repo_state):
235 235 return _render("repo_state", repo_state)
236 236
237 237 def repo_actions(repo_name):
238 238 return _render('repo_actions', repo_name, super_user_actions)
239 239
240 240 def user_profile(username):
241 241 return _render('user_profile', username)
242 242
243 243 repos_data = []
244 244 for repo in repo_list:
245 245 cs_cache = repo.changeset_cache
246 246 row = {
247 247 "menu": quick_menu(repo.repo_name),
248 248
249 "name": repo_lnk(repo.repo_name, repo.repo_type,
250 repo.repo_state, repo.private, repo.fork),
249 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
250 repo.private, repo.archived, repo.fork),
251 251 "name_raw": repo.repo_name.lower(),
252 252
253 253 "last_change": last_change(repo.last_db_change),
254 254 "last_change_raw": datetime_to_time(repo.last_db_change),
255 255
256 256 "last_changeset": last_rev(repo.repo_name, cs_cache),
257 257 "last_changeset_raw": cs_cache.get('revision'),
258 258
259 259 "desc": desc(repo.description_safe),
260 260 "owner": user_profile(repo.user.username),
261 261
262 262 "state": state(repo.repo_state),
263 263 "rss": rss_lnk(repo.repo_name),
264 264
265 265 "atom": atom_lnk(repo.repo_name),
266 266 }
267 267 if admin:
268 268 row.update({
269 269 "action": repo_actions(repo.repo_name),
270 270 })
271 271 repos_data.append(row)
272 272
273 273 return repos_data
274 274
275 275 def _get_defaults(self, repo_name):
276 276 """
277 277 Gets information about repository, and returns a dict for
278 278 usage in forms
279 279
280 280 :param repo_name:
281 281 """
282 282
283 283 repo_info = Repository.get_by_repo_name(repo_name)
284 284
285 285 if repo_info is None:
286 286 return None
287 287
288 288 defaults = repo_info.get_dict()
289 289 defaults['repo_name'] = repo_info.just_name
290 290
291 291 groups = repo_info.groups_with_parents
292 292 parent_group = groups[-1] if groups else None
293 293
294 294 # we use -1 as this is how in HTML, we mark an empty group
295 295 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
296 296
297 297 keys_to_process = (
298 298 {'k': 'repo_type', 'strip': False},
299 299 {'k': 'repo_enable_downloads', 'strip': True},
300 300 {'k': 'repo_description', 'strip': True},
301 301 {'k': 'repo_enable_locking', 'strip': True},
302 302 {'k': 'repo_landing_rev', 'strip': True},
303 303 {'k': 'clone_uri', 'strip': False},
304 304 {'k': 'push_uri', 'strip': False},
305 305 {'k': 'repo_private', 'strip': True},
306 306 {'k': 'repo_enable_statistics', 'strip': True}
307 307 )
308 308
309 309 for item in keys_to_process:
310 310 attr = item['k']
311 311 if item['strip']:
312 312 attr = remove_prefix(item['k'], 'repo_')
313 313
314 314 val = defaults[attr]
315 315 if item['k'] == 'repo_landing_rev':
316 316 val = ':'.join(defaults[attr])
317 317 defaults[item['k']] = val
318 318 if item['k'] == 'clone_uri':
319 319 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
320 320 if item['k'] == 'push_uri':
321 321 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
322 322
323 323 # fill owner
324 324 if repo_info.user:
325 325 defaults.update({'user': repo_info.user.username})
326 326 else:
327 327 replacement_user = User.get_first_super_admin().username
328 328 defaults.update({'user': replacement_user})
329 329
330 330 return defaults
331 331
332 332 def update(self, repo, **kwargs):
333 333 try:
334 334 cur_repo = self._get_repo(repo)
335 335 source_repo_name = cur_repo.repo_name
336 336 if 'user' in kwargs:
337 337 cur_repo.user = User.get_by_username(kwargs['user'])
338 338
339 339 if 'repo_group' in kwargs:
340 340 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
341 341 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
342 342
343 343 update_keys = [
344 344 (1, 'repo_description'),
345 345 (1, 'repo_landing_rev'),
346 346 (1, 'repo_private'),
347 347 (1, 'repo_enable_downloads'),
348 348 (1, 'repo_enable_locking'),
349 349 (1, 'repo_enable_statistics'),
350 350 (0, 'clone_uri'),
351 351 (0, 'push_uri'),
352 352 (0, 'fork_id')
353 353 ]
354 354 for strip, k in update_keys:
355 355 if k in kwargs:
356 356 val = kwargs[k]
357 357 if strip:
358 358 k = remove_prefix(k, 'repo_')
359 359
360 360 setattr(cur_repo, k, val)
361 361
362 362 new_name = cur_repo.get_new_name(kwargs['repo_name'])
363 363 cur_repo.repo_name = new_name
364 364
365 365 # if private flag is set, reset default permission to NONE
366 366 if kwargs.get('repo_private'):
367 367 EMPTY_PERM = 'repository.none'
368 368 RepoModel().grant_user_permission(
369 369 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
370 370 )
371 371
372 372 # handle extra fields
373 373 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
374 374 kwargs):
375 375 k = RepositoryField.un_prefix_key(field)
376 376 ex_field = RepositoryField.get_by_key_name(
377 377 key=k, repo=cur_repo)
378 378 if ex_field:
379 379 ex_field.field_value = kwargs[field]
380 380 self.sa.add(ex_field)
381 381 cur_repo.updated_on = datetime.datetime.now()
382 382 self.sa.add(cur_repo)
383 383
384 384 if source_repo_name != new_name:
385 385 # rename repository
386 386 self._rename_filesystem_repo(
387 387 old=source_repo_name, new=new_name)
388 388
389 389 return cur_repo
390 390 except Exception:
391 391 log.error(traceback.format_exc())
392 392 raise
393 393
394 394 def _create_repo(self, repo_name, repo_type, description, owner,
395 395 private=False, clone_uri=None, repo_group=None,
396 396 landing_rev='rev:tip', fork_of=None,
397 397 copy_fork_permissions=False, enable_statistics=False,
398 398 enable_locking=False, enable_downloads=False,
399 399 copy_group_permissions=False,
400 400 state=Repository.STATE_PENDING):
401 401 """
402 402 Create repository inside database with PENDING state, this should be
403 403 only executed by create() repo. With exception of importing existing
404 404 repos
405 405 """
406 406 from rhodecode.model.scm import ScmModel
407 407
408 408 owner = self._get_user(owner)
409 409 fork_of = self._get_repo(fork_of)
410 410 repo_group = self._get_repo_group(safe_int(repo_group))
411 411
412 412 try:
413 413 repo_name = safe_unicode(repo_name)
414 414 description = safe_unicode(description)
415 415 # repo name is just a name of repository
416 416 # while repo_name_full is a full qualified name that is combined
417 417 # with name and path of group
418 418 repo_name_full = repo_name
419 419 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
420 420
421 421 new_repo = Repository()
422 422 new_repo.repo_state = state
423 423 new_repo.enable_statistics = False
424 424 new_repo.repo_name = repo_name_full
425 425 new_repo.repo_type = repo_type
426 426 new_repo.user = owner
427 427 new_repo.group = repo_group
428 428 new_repo.description = description or repo_name
429 429 new_repo.private = private
430 new_repo.archived = False
430 431 new_repo.clone_uri = clone_uri
431 432 new_repo.landing_rev = landing_rev
432 433
433 434 new_repo.enable_statistics = enable_statistics
434 435 new_repo.enable_locking = enable_locking
435 436 new_repo.enable_downloads = enable_downloads
436 437
437 438 if repo_group:
438 439 new_repo.enable_locking = repo_group.enable_locking
439 440
440 441 if fork_of:
441 442 parent_repo = fork_of
442 443 new_repo.fork = parent_repo
443 444
444 445 events.trigger(events.RepoPreCreateEvent(new_repo))
445 446
446 447 self.sa.add(new_repo)
447 448
448 449 EMPTY_PERM = 'repository.none'
449 450 if fork_of and copy_fork_permissions:
450 451 repo = fork_of
451 452 user_perms = UserRepoToPerm.query() \
452 453 .filter(UserRepoToPerm.repository == repo).all()
453 454 group_perms = UserGroupRepoToPerm.query() \
454 455 .filter(UserGroupRepoToPerm.repository == repo).all()
455 456
456 457 for perm in user_perms:
457 458 UserRepoToPerm.create(
458 459 perm.user, new_repo, perm.permission)
459 460
460 461 for perm in group_perms:
461 462 UserGroupRepoToPerm.create(
462 463 perm.users_group, new_repo, perm.permission)
463 464 # in case we copy permissions and also set this repo to private
464 465 # override the default user permission to make it a private
465 466 # repo
466 467 if private:
467 468 RepoModel(self.sa).grant_user_permission(
468 469 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
469 470
470 471 elif repo_group and copy_group_permissions:
471 472 user_perms = UserRepoGroupToPerm.query() \
472 473 .filter(UserRepoGroupToPerm.group == repo_group).all()
473 474
474 475 group_perms = UserGroupRepoGroupToPerm.query() \
475 476 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
476 477
477 478 for perm in user_perms:
478 479 perm_name = perm.permission.permission_name.replace(
479 480 'group.', 'repository.')
480 481 perm_obj = Permission.get_by_key(perm_name)
481 482 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
482 483
483 484 for perm in group_perms:
484 485 perm_name = perm.permission.permission_name.replace(
485 486 'group.', 'repository.')
486 487 perm_obj = Permission.get_by_key(perm_name)
487 488 UserGroupRepoToPerm.create(
488 489 perm.users_group, new_repo, perm_obj)
489 490
490 491 if private:
491 492 RepoModel(self.sa).grant_user_permission(
492 493 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
493 494
494 495 else:
495 496 perm_obj = self._create_default_perms(new_repo, private)
496 497 self.sa.add(perm_obj)
497 498
498 499 # now automatically start following this repository as owner
499 500 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
500 501 owner.user_id)
501 502
502 503 # we need to flush here, in order to check if database won't
503 504 # throw any exceptions, create filesystem dirs at the very end
504 505 self.sa.flush()
505 506 events.trigger(events.RepoCreateEvent(new_repo))
506 507 return new_repo
507 508
508 509 except Exception:
509 510 log.error(traceback.format_exc())
510 511 raise
511 512
512 513 def create(self, form_data, cur_user):
513 514 """
514 515 Create repository using celery tasks
515 516
516 517 :param form_data:
517 518 :param cur_user:
518 519 """
519 520 from rhodecode.lib.celerylib import tasks, run_task
520 521 return run_task(tasks.create_repo, form_data, cur_user)
521 522
522 523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
523 524 perm_deletions=None, check_perms=True,
524 525 cur_user=None):
525 526 if not perm_additions:
526 527 perm_additions = []
527 528 if not perm_updates:
528 529 perm_updates = []
529 530 if not perm_deletions:
530 531 perm_deletions = []
531 532
532 533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
533 534
534 535 changes = {
535 536 'added': [],
536 537 'updated': [],
537 538 'deleted': []
538 539 }
539 540 # update permissions
540 541 for member_id, perm, member_type in perm_updates:
541 542 member_id = int(member_id)
542 543 if member_type == 'user':
543 544 member_name = User.get(member_id).username
544 545 # this updates also current one if found
545 546 self.grant_user_permission(
546 547 repo=repo, user=member_id, perm=perm)
547 548 elif member_type == 'user_group':
548 549 # check if we have permissions to alter this usergroup
549 550 member_name = UserGroup.get(member_id).users_group_name
550 551 if not check_perms or HasUserGroupPermissionAny(
551 552 *req_perms)(member_name, user=cur_user):
552 553 self.grant_user_group_permission(
553 554 repo=repo, group_name=member_id, perm=perm)
554 555 else:
555 556 raise ValueError("member_type must be 'user' or 'user_group' "
556 557 "got {} instead".format(member_type))
557 558 changes['updated'].append({'type': member_type, 'id': member_id,
558 559 'name': member_name, 'new_perm': perm})
559 560
560 561 # set new permissions
561 562 for member_id, perm, member_type in perm_additions:
562 563 member_id = int(member_id)
563 564 if member_type == 'user':
564 565 member_name = User.get(member_id).username
565 566 self.grant_user_permission(
566 567 repo=repo, user=member_id, perm=perm)
567 568 elif member_type == 'user_group':
568 569 # check if we have permissions to alter this usergroup
569 570 member_name = UserGroup.get(member_id).users_group_name
570 571 if not check_perms or HasUserGroupPermissionAny(
571 572 *req_perms)(member_name, user=cur_user):
572 573 self.grant_user_group_permission(
573 574 repo=repo, group_name=member_id, perm=perm)
574 575 else:
575 576 raise ValueError("member_type must be 'user' or 'user_group' "
576 577 "got {} instead".format(member_type))
577 578
578 579 changes['added'].append({'type': member_type, 'id': member_id,
579 580 'name': member_name, 'new_perm': perm})
580 581 # delete permissions
581 582 for member_id, perm, member_type in perm_deletions:
582 583 member_id = int(member_id)
583 584 if member_type == 'user':
584 585 member_name = User.get(member_id).username
585 586 self.revoke_user_permission(repo=repo, user=member_id)
586 587 elif member_type == 'user_group':
587 588 # check if we have permissions to alter this usergroup
588 589 member_name = UserGroup.get(member_id).users_group_name
589 590 if not check_perms or HasUserGroupPermissionAny(
590 591 *req_perms)(member_name, user=cur_user):
591 592 self.revoke_user_group_permission(
592 593 repo=repo, group_name=member_id)
593 594 else:
594 595 raise ValueError("member_type must be 'user' or 'user_group' "
595 596 "got {} instead".format(member_type))
596 597
597 598 changes['deleted'].append({'type': member_type, 'id': member_id,
598 599 'name': member_name, 'new_perm': perm})
599 600 return changes
600 601
601 602 def create_fork(self, form_data, cur_user):
602 603 """
603 604 Simple wrapper into executing celery task for fork creation
604 605
605 606 :param form_data:
606 607 :param cur_user:
607 608 """
608 609 from rhodecode.lib.celerylib import tasks, run_task
609 610 return run_task(tasks.create_repo_fork, form_data, cur_user)
610 611
612 def archive(self, repo):
613 """
614 Archive given repository. Set archive flag.
615
616 :param repo:
617 """
618 repo = self._get_repo(repo)
619 if repo:
620
621 try:
622 repo.archived = True
623 self.sa.add(repo)
624 self.sa.commit()
625 except Exception:
626 log.error(traceback.format_exc())
627 raise
628
611 629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
612 630 """
613 631 Delete given repository, forks parameter defines what do do with
614 632 attached forks. Throws AttachedForksError if deleted repo has attached
615 633 forks
616 634
617 635 :param repo:
618 636 :param forks: str 'delete' or 'detach'
637 :param pull_requests: str 'delete' or None
619 638 :param fs_remove: remove(archive) repo from filesystem
620 639 """
621 640 if not cur_user:
622 641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
623 642 repo = self._get_repo(repo)
624 643 if repo:
625 644 if forks == 'detach':
626 645 for r in repo.forks:
627 646 r.fork = None
628 647 self.sa.add(r)
629 648 elif forks == 'delete':
630 649 for r in repo.forks:
631 650 self.delete(r, forks='delete')
632 651 elif [f for f in repo.forks]:
633 652 raise AttachedForksError()
634 653
635 654 # check for pull requests
636 655 pr_sources = repo.pull_requests_source
637 656 pr_targets = repo.pull_requests_target
638 657 if pull_requests != 'delete' and (pr_sources or pr_targets):
639 658 raise AttachedPullRequestsError()
640 659
641 660 old_repo_dict = repo.get_dict()
642 661 events.trigger(events.RepoPreDeleteEvent(repo))
643 662 try:
644 663 self.sa.delete(repo)
645 664 if fs_remove:
646 665 self._delete_filesystem_repo(repo)
647 666 else:
648 667 log.debug('skipping removal from filesystem')
649 668 old_repo_dict.update({
650 669 'deleted_by': cur_user,
651 670 'deleted_on': time.time(),
652 671 })
653 672 log_delete_repository(**old_repo_dict)
654 673 events.trigger(events.RepoDeleteEvent(repo))
655 674 except Exception:
656 675 log.error(traceback.format_exc())
657 676 raise
658 677
659 678 def grant_user_permission(self, repo, user, perm):
660 679 """
661 680 Grant permission for user on given repository, or update existing one
662 681 if found
663 682
664 683 :param repo: Instance of Repository, repository_id, or repository name
665 684 :param user: Instance of User, user_id or username
666 685 :param perm: Instance of Permission, or permission_name
667 686 """
668 687 user = self._get_user(user)
669 688 repo = self._get_repo(repo)
670 689 permission = self._get_perm(perm)
671 690
672 691 # check if we have that permission already
673 692 obj = self.sa.query(UserRepoToPerm) \
674 693 .filter(UserRepoToPerm.user == user) \
675 694 .filter(UserRepoToPerm.repository == repo) \
676 695 .scalar()
677 696 if obj is None:
678 697 # create new !
679 698 obj = UserRepoToPerm()
680 699 obj.repository = repo
681 700 obj.user = user
682 701 obj.permission = permission
683 702 self.sa.add(obj)
684 703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
685 704 action_logger_generic(
686 705 'granted permission: {} to user: {} on repo: {}'.format(
687 706 perm, user, repo), namespace='security.repo')
688 707 return obj
689 708
690 709 def revoke_user_permission(self, repo, user):
691 710 """
692 711 Revoke permission for user on given repository
693 712
694 713 :param repo: Instance of Repository, repository_id, or repository name
695 714 :param user: Instance of User, user_id or username
696 715 """
697 716
698 717 user = self._get_user(user)
699 718 repo = self._get_repo(repo)
700 719
701 720 obj = self.sa.query(UserRepoToPerm) \
702 721 .filter(UserRepoToPerm.repository == repo) \
703 722 .filter(UserRepoToPerm.user == user) \
704 723 .scalar()
705 724 if obj:
706 725 self.sa.delete(obj)
707 726 log.debug('Revoked perm on %s on %s', repo, user)
708 727 action_logger_generic(
709 728 'revoked permission from user: {} on repo: {}'.format(
710 729 user, repo), namespace='security.repo')
711 730
712 731 def grant_user_group_permission(self, repo, group_name, perm):
713 732 """
714 733 Grant permission for user group on given repository, or update
715 734 existing one if found
716 735
717 736 :param repo: Instance of Repository, repository_id, or repository name
718 737 :param group_name: Instance of UserGroup, users_group_id,
719 738 or user group name
720 739 :param perm: Instance of Permission, or permission_name
721 740 """
722 741 repo = self._get_repo(repo)
723 742 group_name = self._get_user_group(group_name)
724 743 permission = self._get_perm(perm)
725 744
726 745 # check if we have that permission already
727 746 obj = self.sa.query(UserGroupRepoToPerm) \
728 747 .filter(UserGroupRepoToPerm.users_group == group_name) \
729 748 .filter(UserGroupRepoToPerm.repository == repo) \
730 749 .scalar()
731 750
732 751 if obj is None:
733 752 # create new
734 753 obj = UserGroupRepoToPerm()
735 754
736 755 obj.repository = repo
737 756 obj.users_group = group_name
738 757 obj.permission = permission
739 758 self.sa.add(obj)
740 759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
741 760 action_logger_generic(
742 761 'granted permission: {} to usergroup: {} on repo: {}'.format(
743 762 perm, group_name, repo), namespace='security.repo')
744 763
745 764 return obj
746 765
747 766 def revoke_user_group_permission(self, repo, group_name):
748 767 """
749 768 Revoke permission for user group on given repository
750 769
751 770 :param repo: Instance of Repository, repository_id, or repository name
752 771 :param group_name: Instance of UserGroup, users_group_id,
753 772 or user group name
754 773 """
755 774 repo = self._get_repo(repo)
756 775 group_name = self._get_user_group(group_name)
757 776
758 777 obj = self.sa.query(UserGroupRepoToPerm) \
759 778 .filter(UserGroupRepoToPerm.repository == repo) \
760 779 .filter(UserGroupRepoToPerm.users_group == group_name) \
761 780 .scalar()
762 781 if obj:
763 782 self.sa.delete(obj)
764 783 log.debug('Revoked perm to %s on %s', repo, group_name)
765 784 action_logger_generic(
766 785 'revoked permission from usergroup: {} on repo: {}'.format(
767 786 group_name, repo), namespace='security.repo')
768 787
769 788 def delete_stats(self, repo_name):
770 789 """
771 790 removes stats for given repo
772 791
773 792 :param repo_name:
774 793 """
775 794 repo = self._get_repo(repo_name)
776 795 try:
777 796 obj = self.sa.query(Statistics) \
778 797 .filter(Statistics.repository == repo).scalar()
779 798 if obj:
780 799 self.sa.delete(obj)
781 800 except Exception:
782 801 log.error(traceback.format_exc())
783 802 raise
784 803
785 804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
786 805 field_type='str', field_desc=''):
787 806
788 807 repo = self._get_repo(repo_name)
789 808
790 809 new_field = RepositoryField()
791 810 new_field.repository = repo
792 811 new_field.field_key = field_key
793 812 new_field.field_type = field_type # python type
794 813 new_field.field_value = field_value
795 814 new_field.field_desc = field_desc
796 815 new_field.field_label = field_label
797 816 self.sa.add(new_field)
798 817 return new_field
799 818
800 819 def delete_repo_field(self, repo_name, field_key):
801 820 repo = self._get_repo(repo_name)
802 821 field = RepositoryField.get_by_key_name(field_key, repo)
803 822 if field:
804 823 self.sa.delete(field)
805 824
806 825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
807 826 clone_uri=None, repo_store_location=None,
808 827 use_global_config=False):
809 828 """
810 829 makes repository on filesystem. It's group aware means it'll create
811 830 a repository within a group, and alter the paths accordingly of
812 831 group location
813 832
814 833 :param repo_name:
815 834 :param alias:
816 835 :param parent:
817 836 :param clone_uri:
818 837 :param repo_store_location:
819 838 """
820 839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
821 840 from rhodecode.model.scm import ScmModel
822 841
823 842 if Repository.NAME_SEP in repo_name:
824 843 raise ValueError(
825 844 'repo_name must not contain groups got `%s`' % repo_name)
826 845
827 846 if isinstance(repo_group, RepoGroup):
828 847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
829 848 else:
830 849 new_parent_path = repo_group or ''
831 850
832 851 if repo_store_location:
833 852 _paths = [repo_store_location]
834 853 else:
835 854 _paths = [self.repos_path, new_parent_path, repo_name]
836 855 # we need to make it str for mercurial
837 856 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
838 857
839 858 # check if this path is not a repository
840 859 if is_valid_repo(repo_path, self.repos_path):
841 860 raise Exception('This path %s is a valid repository' % repo_path)
842 861
843 862 # check if this path is a group
844 863 if is_valid_repo_group(repo_path, self.repos_path):
845 864 raise Exception('This path %s is a valid group' % repo_path)
846 865
847 866 log.info('creating repo %s in %s from url: `%s`',
848 867 repo_name, safe_unicode(repo_path),
849 868 obfuscate_url_pw(clone_uri))
850 869
851 870 backend = get_backend(repo_type)
852 871
853 872 config_repo = None if use_global_config else repo_name
854 873 if config_repo and new_parent_path:
855 874 config_repo = Repository.NAME_SEP.join(
856 875 (new_parent_path, config_repo))
857 876 config = make_db_config(clear_session=False, repo=config_repo)
858 877 config.set('extensions', 'largefiles', '')
859 878
860 879 # patch and reset hooks section of UI config to not run any
861 880 # hooks on creating remote repo
862 881 config.clear_section('hooks')
863 882
864 883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
865 884 if repo_type == 'git':
866 885 repo = backend(
867 886 repo_path, config=config, create=True, src_url=clone_uri,
868 887 bare=True)
869 888 else:
870 889 repo = backend(
871 890 repo_path, config=config, create=True, src_url=clone_uri)
872 891
873 892 repo.install_hooks()
874 893
875 894 log.debug('Created repo %s with %s backend',
876 895 safe_unicode(repo_name), safe_unicode(repo_type))
877 896 return repo
878 897
879 898 def _rename_filesystem_repo(self, old, new):
880 899 """
881 900 renames repository on filesystem
882 901
883 902 :param old: old name
884 903 :param new: new name
885 904 """
886 905 log.info('renaming repo from %s to %s', old, new)
887 906
888 907 old_path = os.path.join(self.repos_path, old)
889 908 new_path = os.path.join(self.repos_path, new)
890 909 if os.path.isdir(new_path):
891 910 raise Exception(
892 911 'Was trying to rename to already existing dir %s' % new_path
893 912 )
894 913 shutil.move(old_path, new_path)
895 914
    def _delete_filesystem_repo(self, repo):
        """
        Remove a repository from the filesystem in a recoverable way.

        The removal is actually made by adding an ``rm__`` prefix to the
        repository directory and renaming the internal ``.hg``/``.git`` dir,
        so the repository is no longer valid for RhodeCode but can be
        undeleted later by reverting the renames.

        :param repo: repo object (database model instance providing
            ``repo_name``, ``group``, ``repo_type``, ``just_name``,
            ``scm_instance`` and ``cached_diffs_dir``)
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        # NOTE(review): bare repos have no working-copy '.git' dir to hide;
        # non-git/hg backends presumably report bare=False via this getattr
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo: rename '.hg'/'.git' to 'rm__.hg'/'rm__.git'
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # build a unique tombstone name: rm__<timestamp>_<microseconds>__<name>
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
938 957
939 958
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    # matches e.g. "readme", "README.md", "ReadMe.rst"; extension (with the
    # leading dot) is captured in group 1, or None for a bare "readme"
    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    # matches directory names like "doc" / "docs"
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower number == higher priority; keys are extensions incl. leading dot
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # BUGFIX: '.mkdn' previously lacked the leading dot, so it could never
        # match an extension captured by `readme_re` and such files were
        # denied renderer priority.
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        """
        :param default_renderer: name of the preferred renderer
            ('rst' or 'markdown'); its extensions get boosted priority
        """
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`.

        Looks for readme files directly under `path` first; when none is
        found, recurses into doc-like subdirectories in priority order.

        :param commit: commit object providing ``get_nodes(path)``
        :param path: directory path to search, defaults to the root
        :return: the best-matching readme node, or None
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        """Yield a :class:`ReadmeMatch` for every readme-named file node."""
        for node in nodes:
            if not node.is_file():
                continue
            # match against the basename only
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        """Yield paths of directory nodes that look like doc directories."""
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """
        Return a sortable (renderer_priority, extension_priority) tuple;
        extensions of the configured default renderer sort first.
        """
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        # ties on priority are broken by path for deterministic results
        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):
        # unknown directory names sort after 'doc'/'docs'
        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)


class ReadmeMatch:
    """Pairs a readme file node with its regex match and computed priority."""

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        # BUGFIX: the format string was missing its closing '>'
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,335 +1,336 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 15 pyroutes.register('favicon', '/favicon.ico', []);
16 16 pyroutes.register('robots', '/robots.txt', []);
17 17 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
18 18 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
19 19 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
20 20 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
21 21 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
22 22 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
23 23 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
24 24 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
25 25 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
26 26 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
27 27 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
28 28 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
29 29 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
30 30 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
31 31 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
32 32 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
33 33 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
34 34 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
35 35 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
36 36 pyroutes.register('ops_ping_legacy', '/_admin/ping', []);
37 37 pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []);
38 38 pyroutes.register('admin_home', '/_admin', []);
39 39 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
40 40 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
41 41 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
42 42 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
43 43 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
44 44 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
45 45 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
46 46 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
47 47 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
48 48 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
49 49 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []);
50 50 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
51 51 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
52 52 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
53 53 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
54 54 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
55 55 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
56 56 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
57 57 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
58 58 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
59 59 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
60 60 pyroutes.register('admin_settings', '/_admin/settings', []);
61 61 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
62 62 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
63 63 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
64 64 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
65 65 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
66 66 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
67 67 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
68 68 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
69 69 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
70 70 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
71 71 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
72 72 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
73 73 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
74 74 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
75 75 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
76 76 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
77 77 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
78 78 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
79 79 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
80 80 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
81 81 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
82 82 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
83 83 pyroutes.register('admin_settings_automation', '/_admin/_admin/settings/automation', []);
84 84 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
85 85 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
86 86 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
87 87 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
88 88 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
89 89 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
90 90 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
91 91 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
92 92 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
93 93 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
94 94 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
95 95 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
96 96 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
97 97 pyroutes.register('users', '/_admin/users', []);
98 98 pyroutes.register('users_data', '/_admin/users_data', []);
99 99 pyroutes.register('users_create', '/_admin/users/create', []);
100 100 pyroutes.register('users_new', '/_admin/users/new', []);
101 101 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
102 102 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
103 103 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
104 104 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
105 105 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
106 106 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
107 107 pyroutes.register('user_force_password_reset', '/_admin/users/%(user_id)s/password_reset', ['user_id']);
108 108 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
109 109 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
110 110 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
111 111 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
112 112 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
113 113 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
114 114 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
115 115 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
116 116 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
117 117 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
118 118 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
119 119 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
120 120 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
121 121 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
122 122 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
123 123 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
124 124 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
125 125 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
126 126 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
127 127 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
128 128 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
129 129 pyroutes.register('user_groups', '/_admin/user_groups', []);
130 130 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
131 131 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
132 132 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
133 133 pyroutes.register('repos', '/_admin/repos', []);
134 134 pyroutes.register('repo_new', '/_admin/repos/new', []);
135 135 pyroutes.register('repo_create', '/_admin/repos/create', []);
136 136 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
137 137 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
138 138 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
139 139 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
140 140 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
141 141 pyroutes.register('channelstream_proxy', '/_channelstream', []);
142 142 pyroutes.register('login', '/_admin/login', []);
143 143 pyroutes.register('logout', '/_admin/logout', []);
144 144 pyroutes.register('register', '/_admin/register', []);
145 145 pyroutes.register('reset_password', '/_admin/password_reset', []);
146 146 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
147 147 pyroutes.register('home', '/', []);
148 148 pyroutes.register('user_autocomplete_data', '/_users', []);
149 149 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
150 150 pyroutes.register('repo_list_data', '/_repos', []);
151 151 pyroutes.register('goto_switcher_data', '/_goto_data', []);
152 152 pyroutes.register('markup_preview', '/_markup_preview', []);
153 153 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
154 154 pyroutes.register('journal', '/_admin/journal', []);
155 155 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
156 156 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
157 157 pyroutes.register('journal_public', '/_admin/public_journal', []);
158 158 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
159 159 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
160 160 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
161 161 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
162 162 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
163 163 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
164 164 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
165 165 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
166 166 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
167 167 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
168 168 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
169 169 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
170 170 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
171 171 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
172 172 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
173 173 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
174 174 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
175 175 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
176 176 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
177 177 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
178 178 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
179 179 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
180 180 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
181 181 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
182 182 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
183 183 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
184 184 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
185 185 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
186 186 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
187 187 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
188 188 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
189 189 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
190 190 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
191 191 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
192 192 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
193 193 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
194 194 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
195 195 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
196 196 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
197 197 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
198 198 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
199 199 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
200 200 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
201 201 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
202 202 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
203 203 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
204 204 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
205 205 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
206 206 pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']);
207 207 pyroutes.register('repo_changelog_elements_file', '/%(repo_name)s/changelog_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
208 208 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
209 209 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
210 210 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
211 211 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
212 212 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
213 213 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
214 214 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
215 215 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
216 216 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
217 217 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
218 218 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
219 219 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
220 220 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
221 221 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
222 222 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
223 223 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
224 224 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
225 225 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
226 226 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
227 227 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
228 228 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
229 229 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
230 230 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
231 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
231 232 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
232 233 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
233 234 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
234 235 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
235 236 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
236 237 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
237 238 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
238 239 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
239 240 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
240 241 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
241 242 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
242 243 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
243 244 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
244 245 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
245 246 pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
246 247 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
247 248 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
248 249 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
249 250 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
250 251 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
251 252 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
252 253 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
253 254 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
254 255 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
255 256 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
256 257 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
257 258 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
258 259 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
259 260 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
260 261 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
261 262 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
262 263 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
263 264 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
264 265 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
265 266 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']);
266 267 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']);
267 268 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
268 269 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
269 270 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
270 271 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
271 272 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
272 273 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
273 274 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
274 275 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
275 276 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
276 277 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
277 278 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
278 279 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
279 280 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
280 281 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
281 282 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
282 283 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
283 284 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
284 285 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
285 286 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
286 287 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
287 288 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
288 289 pyroutes.register('search', '/_admin/search', []);
289 290 pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']);
290 291 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
291 292 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
292 293 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
293 294 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
294 295 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
295 296 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
296 297 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
297 298 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
298 299 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
299 300 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
300 301 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
301 302 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
302 303 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
303 304 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
304 305 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
305 306 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
306 307 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
307 308 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
308 309 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
309 310 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
310 311 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
311 312 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
312 313 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
313 314 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
314 315 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
315 316 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
316 317 pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []);
317 318 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
318 319 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
319 320 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
320 321 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
321 322 pyroutes.register('gists_show', '/_admin/gists', []);
322 323 pyroutes.register('gists_new', '/_admin/gists/new', []);
323 324 pyroutes.register('gists_create', '/_admin/gists/create', []);
324 325 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
325 326 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
326 327 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
327 328 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
328 329 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
329 330 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']);
330 331 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
331 332 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
332 333 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
333 334 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
334 335 pyroutes.register('apiv2', '/_admin/api', []);
335 336 }
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now