repositories: added option to archive repositories instead of deleting them....
marcink - r3090:bdd9dc16 default

The requested changes are too big and the content was truncated.

@@ -0,0 +1,36 @@
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
11 def upgrade(migrate_engine):
12 """
13 Upgrade operations go here.
14 Don't create your own engine; bind migrate_engine to your metadata
15 """
16 _reset_base(migrate_engine)
17 from rhodecode.lib.dbmigrate.schema import db_4_13_0_0 as db
18
19 repository_table = db.Repository.__table__
20
21 archived = Column('archived', Boolean(), nullable=True)
22 archived.create(table=repository_table)
23
24 # issue fixups
25 fixups(db, meta.Session)
26
27
28 def downgrade(migrate_engine):
29 meta = MetaData()
30 meta.bind = migrate_engine
31
32
33 def fixups(models, _SESSION):
34 pass
35
36
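
For reference, a minimal sketch of what a matching downgrade could look like, assuming the db_4_13_0_0 schema snapshot already declares the archived column and using the same module-level imports and sqlalchemy-migrate changeset API as the script above; the committed downgrade() is intentionally left as a no-op.

def downgrade(migrate_engine):
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_13_0_0 as db

    repository_table = db.Repository.__table__
    # mirror of upgrade(): drop the column that archived.create() added
    repository_table.columns['archived'].drop()
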
@@ -1,63 +1,63 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pyramid
44 44 CONFIG = {}
45 45
46 46 # Populated with the settings dictionary from application init in
47 47 # rhodecode.conf.environment.load_pyramid_environment
48 48 PYRAMID_SETTINGS = {}
49 49
50 50 # Linked module for extensions
51 51 EXTENSIONS = {}
52 52
53 53 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
54 __dbversion__ = 90 # defines current db version for migrations
54 __dbversion__ = 91 # defines current db version for migrations
55 55 __platform__ = platform.system()
56 56 __license__ = 'AGPLv3, and Commercial License'
57 57 __author__ = 'RhodeCode GmbH'
58 58 __url__ = 'https://code.rhodecode.com'
59 59
60 60 is_windows = __platform__ in ['Windows']
61 61 is_unix = not is_windows
62 62 is_test = False
63 63 disable_error_handler = False
@@ -1,652 +1,677 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26 26
27 27 from rhodecode.lib import helpers as h, diffs
28 28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 30 from rhodecode.model import repo
31 31 from rhodecode.model import repo_group
32 32 from rhodecode.model import user_group
33 33 from rhodecode.model import user
34 34 from rhodecode.model.db import User
35 35 from rhodecode.model.scm import ScmModel
36 36 from rhodecode.model.settings import VcsSettingsModel
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 ADMIN_PREFIX = '/_admin'
42 42 STATIC_FILE_PREFIX = '/_static'
43 43
44 44 URL_NAME_REQUIREMENTS = {
45 45 # group names can have a slash in them, but they must not end with a slash
46 46 'group_name': r'.*?[^/]',
47 47 'repo_group_name': r'.*?[^/]',
48 48 # repo names can have a slash in them, but they must not end with a slash
49 49 'repo_name': r'.*?[^/]',
50 50 # file path eats up everything at the end
51 51 'f_path': r'.*',
52 52 # reference types
53 53 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
54 54 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
55 55 }
56 56
57 57
58 58 def add_route_with_slash(config, name, pattern, **kw):
59 59 config.add_route(name, pattern, **kw)
60 60 if not pattern.endswith('/'):
61 61 config.add_route(name + '_slash', pattern + '/', **kw)
62 62
63 63
64 64 def add_route_requirements(route_path, requirements=URL_NAME_REQUIREMENTS):
65 65 """
66 66 Adds regex requirements to pyramid routes using a mapping dict
67 67 e.g::
68 68 add_route_requirements('{repo_name}/settings')
69 69 """
70 70 for key, regex in requirements.items():
71 71 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
72 72 return route_path
73 73
74 74
75 75 def get_format_ref_id(repo):
76 76 """Returns a `repo` specific reference formatter function"""
77 77 if h.is_svn(repo):
78 78 return _format_ref_id_svn
79 79 else:
80 80 return _format_ref_id
81 81
82 82
83 83 def _format_ref_id(name, raw_id):
84 84 """Default formatting of a given reference `name`"""
85 85 return name
86 86
87 87
88 88 def _format_ref_id_svn(name, raw_id):
89 89 """Special way of formatting a reference for Subversion including path"""
90 90 return '%s@%s' % (name, raw_id)
91 91
92 92
93 93 class TemplateArgs(StrictAttributeDict):
94 94 pass
95 95
96 96
97 97 class BaseAppView(object):
98 98
99 99 def __init__(self, context, request):
100 100 self.request = request
101 101 self.context = context
102 102 self.session = request.session
103 103 if not hasattr(request, 'user'):
104 104 # NOTE(marcink): edge case, we ended up in matched route
105 105 # but probably outside of web-app context, e.g. API CALL/VCS CALL
106 106 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
107 107 log.warning('Unable to process request `%s` in this scope', request)
108 108 raise HTTPBadRequest()
109 109
110 110 self._rhodecode_user = request.user # auth user
111 111 self._rhodecode_db_user = self._rhodecode_user.get_instance()
112 112 self._maybe_needs_password_change(
113 113 request.matched_route.name, self._rhodecode_db_user)
114 114
115 115 def _maybe_needs_password_change(self, view_name, user_obj):
116 116 log.debug('Checking if user %s needs password change on view %s',
117 117 user_obj, view_name)
118 118 skip_user_views = [
119 119 'logout', 'login',
120 120 'my_account_password', 'my_account_password_update'
121 121 ]
122 122
123 123 if not user_obj:
124 124 return
125 125
126 126 if user_obj.username == User.DEFAULT_USER:
127 127 return
128 128
129 129 now = time.time()
130 130 should_change = user_obj.user_data.get('force_password_change')
131 131 change_after = safe_int(should_change) or 0
132 132 if should_change and now > change_after:
133 133 log.debug('User %s requires password change', user_obj)
134 134 h.flash('You are required to change your password', 'warning',
135 135 ignore_duplicate=True)
136 136
137 137 if view_name not in skip_user_views:
138 138 raise HTTPFound(
139 139 self.request.route_path('my_account_password'))
140 140
141 141 def _log_creation_exception(self, e, repo_name):
142 142 _ = self.request.translate
143 143 reason = None
144 144 if len(e.args) == 2:
145 145 reason = e.args[1]
146 146
147 147 if reason == 'INVALID_CERTIFICATE':
148 148 log.exception(
149 149 'Exception creating a repository: invalid certificate')
150 150 msg = (_('Error creating repository %s: invalid certificate')
151 151 % repo_name)
152 152 else:
153 153 log.exception("Exception creating a repository")
154 154 msg = (_('Error creating repository %s')
155 155 % repo_name)
156 156 return msg
157 157
158 158 def _get_local_tmpl_context(self, include_app_defaults=True):
159 159 c = TemplateArgs()
160 160 c.auth_user = self.request.user
161 161 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
162 162 c.rhodecode_user = self.request.user
163 163
164 164 if include_app_defaults:
165 165 from rhodecode.lib.base import attach_context_attributes
166 166 attach_context_attributes(c, self.request, self.request.user.user_id)
167 167
168 168 return c
169 169
170 170 def _get_template_context(self, tmpl_args, **kwargs):
171 171
172 172 local_tmpl_args = {
173 173 'defaults': {},
174 174 'errors': {},
175 175 'c': tmpl_args
176 176 }
177 177 local_tmpl_args.update(kwargs)
178 178 return local_tmpl_args
179 179
180 180 def load_default_context(self):
181 181 """
182 182 example:
183 183
184 184 def load_default_context(self):
185 185 c = self._get_local_tmpl_context()
186 186 c.custom_var = 'foobar'
187 187
188 188 return c
189 189 """
190 190 raise NotImplementedError('Needs implementation in view class')
191 191
192 192
193 193 class RepoAppView(BaseAppView):
194 194
195 195 def __init__(self, context, request):
196 196 super(RepoAppView, self).__init__(context, request)
197 197 self.db_repo = request.db_repo
198 198 self.db_repo_name = self.db_repo.repo_name
199 199 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
200 200
201 201 def _handle_missing_requirements(self, error):
202 202 log.error(
203 203 'Requirements are missing for repository %s: %s',
204 204 self.db_repo_name, error.message)
205 205
206 206 def _get_local_tmpl_context(self, include_app_defaults=True):
207 207 _ = self.request.translate
208 208 c = super(RepoAppView, self)._get_local_tmpl_context(
209 209 include_app_defaults=include_app_defaults)
210 210
211 211 # register common vars for this type of view
212 212 c.rhodecode_db_repo = self.db_repo
213 213 c.repo_name = self.db_repo_name
214 214 c.repository_pull_requests = self.db_repo_pull_requests
215 215 self.path_filter = PathFilter(None)
216 216
217 217 c.repository_requirements_missing = {}
218 218 try:
219 219 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
220 220 if self.rhodecode_vcs_repo:
221 221 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
222 222 c.auth_user.username)
223 223 self.path_filter = PathFilter(path_perms)
224 224 except RepositoryRequirementError as e:
225 225 c.repository_requirements_missing = {'error': str(e)}
226 226 self._handle_missing_requirements(e)
227 227 self.rhodecode_vcs_repo = None
228 228
229 229 c.path_filter = self.path_filter # used by atom_feed_entry.mako
230 230
231 231 if self.rhodecode_vcs_repo is None:
232 232 # unable to fetch this repo as vcs instance, report back to user
233 233 h.flash(_(
234 234 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
235 235 "Please check if it exist, or is not damaged.") %
236 236 {'repo_name': c.repo_name},
237 237 category='error', ignore_duplicate=True)
238 238 if c.repository_requirements_missing:
239 239 route = self.request.matched_route.name
240 240 if route.startswith(('edit_repo', 'repo_summary')):
241 241 # allow summary and edit repo on missing requirements
242 242 return c
243 243
244 244 raise HTTPFound(
245 245 h.route_path('repo_summary', repo_name=self.db_repo_name))
246 246
247 247 else: # redirect if we don't show missing requirements
248 248 raise HTTPFound(h.route_path('home'))
249 249
250 250 return c
251 251
252 252 def _get_f_path_unchecked(self, matchdict, default=None):
253 253 """
254 254 Should only be used by redirects, everything else should call _get_f_path
255 255 """
256 256 f_path = matchdict.get('f_path')
257 257 if f_path:
258 258 # fix for multiple initial slashes that causes errors for GIT
259 259 return f_path.lstrip('/')
260 260
261 261 return default
262 262
263 263 def _get_f_path(self, matchdict, default=None):
264 264 f_path_match = self._get_f_path_unchecked(matchdict, default)
265 265 return self.path_filter.assert_path_permissions(f_path_match)
266 266
267 267 def _get_general_setting(self, target_repo, settings_key, default=False):
268 268 settings_model = VcsSettingsModel(repo=target_repo)
269 269 settings = settings_model.get_general_settings()
270 270 return settings.get(settings_key, default)
271 271
272 272
273 273 class PathFilter(object):
274 274
275 275 # Expects an instance of BasePathPermissionChecker or None
276 276 def __init__(self, permission_checker):
277 277 self.permission_checker = permission_checker
278 278
279 279 def assert_path_permissions(self, path):
280 280 if path and self.permission_checker and not self.permission_checker.has_access(path):
281 281 raise HTTPForbidden()
282 282 return path
283 283
284 284 def filter_patchset(self, patchset):
285 285 if not self.permission_checker or not patchset:
286 286 return patchset, False
287 287 had_filtered = False
288 288 filtered_patchset = []
289 289 for patch in patchset:
290 290 filename = patch.get('filename', None)
291 291 if not filename or self.permission_checker.has_access(filename):
292 292 filtered_patchset.append(patch)
293 293 else:
294 294 had_filtered = True
295 295 if had_filtered:
296 296 if isinstance(patchset, diffs.LimitedDiffContainer):
297 297 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
298 298 return filtered_patchset, True
299 299 else:
300 300 return patchset, False
301 301
302 302 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
303 303 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
304 304 result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
305 305 result.has_hidden_changes = has_hidden_changes
306 306 return result
307 307
308 308 def get_raw_patch(self, diff_processor):
309 309 if self.permission_checker is None:
310 310 return diff_processor.as_raw()
311 311 elif self.permission_checker.has_full_access:
312 312 return diff_processor.as_raw()
313 313 else:
314 314 return '# Repository has user-specific filters, raw patch generation is disabled.'
315 315
316 316 @property
317 317 def is_enabled(self):
318 318 return self.permission_checker is not None
319 319
320 320
321 321 class RepoGroupAppView(BaseAppView):
322 322 def __init__(self, context, request):
323 323 super(RepoGroupAppView, self).__init__(context, request)
324 324 self.db_repo_group = request.db_repo_group
325 325 self.db_repo_group_name = self.db_repo_group.group_name
326 326
327 327 def _revoke_perms_on_yourself(self, form_result):
328 328 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
329 329 form_result['perm_updates'])
330 330 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
331 331 form_result['perm_additions'])
332 332 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
333 333 form_result['perm_deletions'])
334 334 admin_perm = 'group.admin'
335 335 if _updates and _updates[0][1] != admin_perm or \
336 336 _additions and _additions[0][1] != admin_perm or \
337 337 _deletions and _deletions[0][1] != admin_perm:
338 338 return True
339 339 return False
340 340
341 341
342 342 class UserGroupAppView(BaseAppView):
343 343 def __init__(self, context, request):
344 344 super(UserGroupAppView, self).__init__(context, request)
345 345 self.db_user_group = request.db_user_group
346 346 self.db_user_group_name = self.db_user_group.users_group_name
347 347
348 348
349 349 class UserAppView(BaseAppView):
350 350 def __init__(self, context, request):
351 351 super(UserAppView, self).__init__(context, request)
352 352 self.db_user = request.db_user
353 353 self.db_user_id = self.db_user.user_id
354 354
355 355 _ = self.request.translate
356 356 if not request.db_user_supports_default:
357 357 if self.db_user.username == User.DEFAULT_USER:
358 358 h.flash(_("Editing user `{}` is disabled.".format(
359 359 User.DEFAULT_USER)), category='warning')
360 360 raise HTTPFound(h.route_path('users'))
361 361
362 362
363 363 class DataGridAppView(object):
364 364 """
365 365 Common class to have re-usable grid rendering components
366 366 """
367 367
368 368 def _extract_ordering(self, request, column_map=None):
369 369 column_map = column_map or {}
370 370 column_index = safe_int(request.GET.get('order[0][column]'))
371 371 order_dir = request.GET.get(
372 372 'order[0][dir]', 'desc')
373 373 order_by = request.GET.get(
374 374 'columns[%s][data][sort]' % column_index, 'name_raw')
375 375
376 376 # translate datatable to DB columns
377 377 order_by = column_map.get(order_by) or order_by
378 378
379 379 search_q = request.GET.get('search[value]')
380 380 return search_q, order_by, order_dir
381 381
382 382 def _extract_chunk(self, request):
383 383 start = safe_int(request.GET.get('start'), 0)
384 384 length = safe_int(request.GET.get('length'), 25)
385 385 draw = safe_int(request.GET.get('draw'))
386 386 return draw, start, length
387 387
388 388 def _get_order_col(self, order_by, model):
389 389 if isinstance(order_by, basestring):
390 390 try:
391 391 return operator.attrgetter(order_by)(model)
392 392 except AttributeError:
393 393 return None
394 394 else:
395 395 return order_by
396 396
397 397
398 398 class BaseReferencesView(RepoAppView):
399 399 """
400 400 Base for reference view for branches, tags and bookmarks.
401 401 """
402 402 def load_default_context(self):
403 403 c = self._get_local_tmpl_context()
404 404
405 405
406 406 return c
407 407
408 408 def load_refs_context(self, ref_items, partials_template):
409 409 _render = self.request.get_partial_renderer(partials_template)
410 410 pre_load = ["author", "date", "message"]
411 411
412 412 is_svn = h.is_svn(self.rhodecode_vcs_repo)
413 413 is_hg = h.is_hg(self.rhodecode_vcs_repo)
414 414
415 415 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
416 416
417 417 closed_refs = {}
418 418 if is_hg:
419 419 closed_refs = self.rhodecode_vcs_repo.branches_closed
420 420
421 421 data = []
422 422 for ref_name, commit_id in ref_items:
423 423 commit = self.rhodecode_vcs_repo.get_commit(
424 424 commit_id=commit_id, pre_load=pre_load)
425 425 closed = ref_name in closed_refs
426 426
427 427 # TODO: johbo: Unify generation of reference links
428 428 use_commit_id = '/' in ref_name or is_svn
429 429
430 430 if use_commit_id:
431 431 files_url = h.route_path(
432 432 'repo_files',
433 433 repo_name=self.db_repo_name,
434 434 f_path=ref_name if is_svn else '',
435 435 commit_id=commit_id)
436 436
437 437 else:
438 438 files_url = h.route_path(
439 439 'repo_files',
440 440 repo_name=self.db_repo_name,
441 441 f_path=ref_name if is_svn else '',
442 442 commit_id=ref_name,
443 443 _query=dict(at=ref_name))
444 444
445 445 data.append({
446 446 "name": _render('name', ref_name, files_url, closed),
447 447 "name_raw": ref_name,
448 448 "date": _render('date', commit.date),
449 449 "date_raw": datetime_to_time(commit.date),
450 450 "author": _render('author', commit.author),
451 451 "commit": _render(
452 452 'commit', commit.message, commit.raw_id, commit.idx),
453 453 "commit_raw": commit.idx,
454 454 "compare": _render(
455 455 'compare', format_ref_id(ref_name, commit.raw_id)),
456 456 })
457 457
458 458 return data
459 459
460 460
461 461 class RepoRoutePredicate(object):
462 462 def __init__(self, val, config):
463 463 self.val = val
464 464
465 465 def text(self):
466 466 return 'repo_route = %s' % self.val
467 467
468 468 phash = text
469 469
470 470 def __call__(self, info, request):
471 471 if hasattr(request, 'vcs_call'):
472 472 # skip vcs calls
473 473 return
474 474
475 475 repo_name = info['match']['repo_name']
476 476 repo_model = repo.RepoModel()
477 477
478 478 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
479 479
480 480 def redirect_if_creating(route_info, db_repo):
481 481 skip_views = ['edit_repo_advanced_delete']
482 482 route = route_info['route']
483 483 # we should skip delete view so we can actually "remove" repositories
484 484 # if they get stuck in creating state.
485 485 if route.name in skip_views:
486 486 return
487 487
488 488 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
489 489 repo_creating_url = request.route_path(
490 490 'repo_creating', repo_name=db_repo.repo_name)
491 491 raise HTTPFound(repo_creating_url)
492 492
493 493 if by_name_match:
494 494 # register this as request object we can re-use later
495 495 request.db_repo = by_name_match
496 496 redirect_if_creating(info, by_name_match)
497 497 return True
498 498
499 499 by_id_match = repo_model.get_repo_by_id(repo_name)
500 500 if by_id_match:
501 501 request.db_repo = by_id_match
502 502 redirect_if_creating(info, by_id_match)
503 503 return True
504 504
505 505 return False
506 506
507 507
508 class RepoForbidArchivedRoutePredicate(object):
509 def __init__(self, val, config):
510 self.val = val
511
512 def text(self):
513 return 'repo_forbid_archived = %s' % self.val
514
515 phash = text
516
517 def __call__(self, info, request):
518 _ = request.translate
519 rhodecode_db_repo = request.db_repo
520
521 log.debug(
522 '%s checking archived flag for repo %s',
523 self.__class__.__name__, rhodecode_db_repo.repo_name)
524
525 if rhodecode_db_repo.archived:
526 log.warning('Current view is not supported for archived repo:%s',
527 rhodecode_db_repo.repo_name)
528
529 h.flash(
530 h.literal(_('Action not supported for archived repository.')),
531 category='warning')
532 summary_url = request.route_path(
533 'repo_summary', repo_name=rhodecode_db_repo.repo_name)
534 raise HTTPFound(summary_url)
535 return True
536
537
508 538 class RepoTypeRoutePredicate(object):
509 539 def __init__(self, val, config):
510 540 self.val = val or ['hg', 'git', 'svn']
511 541
512 542 def text(self):
513 543 return 'repo_accepted_type = %s' % self.val
514 544
515 545 phash = text
516 546
517 547 def __call__(self, info, request):
518 548 if hasattr(request, 'vcs_call'):
519 549 # skip vcs calls
520 550 return
521 551
522 552 rhodecode_db_repo = request.db_repo
523 553
524 554 log.debug(
525 555 '%s checking repo type for %s in %s',
526 556 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
527 557
528 558 if rhodecode_db_repo.repo_type in self.val:
529 559 return True
530 560 else:
531 561 log.warning('Current view is not supported for repo type:%s',
532 562 rhodecode_db_repo.repo_type)
533
534 # h.flash(h.literal(
535 # _('Action not supported for %s.' % rhodecode_repo.alias)),
536 # category='warning')
537 # return redirect(
538 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
539
540 563 return False
541 564
542 565
543 566 class RepoGroupRoutePredicate(object):
544 567 def __init__(self, val, config):
545 568 self.val = val
546 569
547 570 def text(self):
548 571 return 'repo_group_route = %s' % self.val
549 572
550 573 phash = text
551 574
552 575 def __call__(self, info, request):
553 576 if hasattr(request, 'vcs_call'):
554 577 # skip vcs calls
555 578 return
556 579
557 580 repo_group_name = info['match']['repo_group_name']
558 581 repo_group_model = repo_group.RepoGroupModel()
559 582 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
560 583
561 584 if by_name_match:
562 585 # register this as request object we can re-use later
563 586 request.db_repo_group = by_name_match
564 587 return True
565 588
566 589 return False
567 590
568 591
569 592 class UserGroupRoutePredicate(object):
570 593 def __init__(self, val, config):
571 594 self.val = val
572 595
573 596 def text(self):
574 597 return 'user_group_route = %s' % self.val
575 598
576 599 phash = text
577 600
578 601 def __call__(self, info, request):
579 602 if hasattr(request, 'vcs_call'):
580 603 # skip vcs calls
581 604 return
582 605
583 606 user_group_id = info['match']['user_group_id']
584 607 user_group_model = user_group.UserGroup()
585 608 by_id_match = user_group_model.get(user_group_id, cache=False)
586 609
587 610 if by_id_match:
588 611 # register this as request object we can re-use later
589 612 request.db_user_group = by_id_match
590 613 return True
591 614
592 615 return False
593 616
594 617
595 618 class UserRoutePredicateBase(object):
596 619 supports_default = None
597 620
598 621 def __init__(self, val, config):
599 622 self.val = val
600 623
601 624 def text(self):
602 625 raise NotImplementedError()
603 626
604 627 def __call__(self, info, request):
605 628 if hasattr(request, 'vcs_call'):
606 629 # skip vcs calls
607 630 return
608 631
609 632 user_id = info['match']['user_id']
610 633 user_model = user.User()
611 634 by_id_match = user_model.get(user_id, cache=False)
612 635
613 636 if by_id_match:
614 637 # register this as request object we can re-use later
615 638 request.db_user = by_id_match
616 639 request.db_user_supports_default = self.supports_default
617 640 return True
618 641
619 642 return False
620 643
621 644
622 645 class UserRoutePredicate(UserRoutePredicateBase):
623 646 supports_default = False
624 647
625 648 def text(self):
626 649 return 'user_route = %s' % self.val
627 650
628 651 phash = text
629 652
630 653
631 654 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
632 655 supports_default = True
633 656
634 657 def text(self):
635 658 return 'user_with_default_route = %s' % self.val
636 659
637 660 phash = text
638 661
639 662
640 663 def includeme(config):
641 664 config.add_route_predicate(
642 665 'repo_route', RepoRoutePredicate)
643 666 config.add_route_predicate(
644 667 'repo_accepted_types', RepoTypeRoutePredicate)
645 668 config.add_route_predicate(
669 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
670 config.add_route_predicate(
646 671 'repo_group_route', RepoGroupRoutePredicate)
647 672 config.add_route_predicate(
648 673 'user_group_route', UserGroupRoutePredicate)
649 674 config.add_route_predicate(
650 675 'user_route_with_default', UserRouteWithDefaultPredicate)
651 676 config.add_route_predicate(
651 652 'user_route', UserRoutePredicate)
\ No newline at end of file
677 'user_route', UserRoutePredicate)
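
The RepoForbidArchivedRoutePredicate added above follows Pyramid's custom route predicate protocol: a constructor taking (val, config), a text()/phash pair for introspection, and a __call__(info, request) that either passes or redirects. Below is a stripped-down, self-contained sketch of that pattern; the class, predicate name, and route names are illustrative only, while the real wiring is the repo_forbid_when_archived registration in includeme() above.

from pyramid.config import Configurator
from pyramid.httpexceptions import HTTPFound


class ForbidArchivedPredicate(object):
    """Illustrative predicate: bounce mutating views off archived repos."""

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'forbid_archived = %s' % self.val

    phash = text

    def __call__(self, info, request):
        db_repo = getattr(request, 'db_repo', None)
        if db_repo is not None and getattr(db_repo, 'archived', False):
            # archived repositories are read-only: send the user back to summary
            raise HTTPFound(
                request.route_path('summary', repo_name=db_repo.repo_name))
        return True


config = Configurator()
config.add_route_predicate('forbid_archived', ForbidArchivedPredicate)
config.add_route('summary', '/{repo_name}/summary')
# mutating routes opt in via the predicate keyword
config.add_route('fork_new', '/{repo_name}/fork', forbid_archived=True)
config.commit()
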
@@ -1,461 +1,462 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.view import view_config
26 26
27 27 from rhodecode.apps._base import BaseAppView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib.auth import (
30 30 LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator,
31 31 CSRFRequired)
32 32 from rhodecode.lib.index import searcher_from_config
33 33 from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.model.db import (
36 func, or_, in_filter_generator, Repository, RepoGroup, User, UserGroup)
36 func, true, or_, in_filter_generator, Repository, RepoGroup, User, UserGroup)
37 37 from rhodecode.model.repo import RepoModel
38 38 from rhodecode.model.repo_group import RepoGroupModel
39 39 from rhodecode.model.scm import RepoGroupList, RepoList
40 40 from rhodecode.model.user import UserModel
41 41 from rhodecode.model.user_group import UserGroupModel
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class HomeView(BaseAppView):
47 47
48 48 def load_default_context(self):
49 49 c = self._get_local_tmpl_context()
50 50 c.user = c.auth_user.get_instance()
51 51
52 52 return c
53 53
54 54 @LoginRequired()
55 55 @view_config(
56 56 route_name='user_autocomplete_data', request_method='GET',
57 57 renderer='json_ext', xhr=True)
58 58 def user_autocomplete_data(self):
59 59 self.load_default_context()
60 60 query = self.request.GET.get('query')
61 61 active = str2bool(self.request.GET.get('active') or True)
62 62 include_groups = str2bool(self.request.GET.get('user_groups'))
63 63 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
64 64 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
65 65
66 66 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
67 67 query, active, include_groups)
68 68
69 69 _users = UserModel().get_users(
70 70 name_contains=query, only_active=active)
71 71
72 72 def maybe_skip_default_user(usr):
73 73 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
74 74 return False
75 75 return True
76 76 _users = filter(maybe_skip_default_user, _users)
77 77
78 78 if include_groups:
79 79 # extend with user groups
80 80 _user_groups = UserGroupModel().get_user_groups(
81 81 name_contains=query, only_active=active,
82 82 expand_groups=expand_groups)
83 83 _users = _users + _user_groups
84 84
85 85 return {'suggestions': _users}
86 86
87 87 @LoginRequired()
88 88 @NotAnonymous()
89 89 @view_config(
90 90 route_name='user_group_autocomplete_data', request_method='GET',
91 91 renderer='json_ext', xhr=True)
92 92 def user_group_autocomplete_data(self):
93 93 self.load_default_context()
94 94 query = self.request.GET.get('query')
95 95 active = str2bool(self.request.GET.get('active') or True)
96 96 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
97 97
98 98 log.debug('generating user group list, query:%s, active:%s',
99 99 query, active)
100 100
101 101 _user_groups = UserGroupModel().get_user_groups(
102 102 name_contains=query, only_active=active,
103 103 expand_groups=expand_groups)
104 104 _user_groups = _user_groups
105 105
106 106 return {'suggestions': _user_groups}
107 107
108 108 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
109 109 org_query = name_contains
110 110 allowed_ids = self._rhodecode_user.repo_acl_ids(
111 111 ['repository.read', 'repository.write', 'repository.admin'],
112 112 cache=False, name_filter=name_contains) or [-1]
113 113
114 114 query = Repository.query()\
115 115 .order_by(func.length(Repository.repo_name))\
116 116 .order_by(Repository.repo_name)\
117 .filter(Repository.archived.isnot(true()))\
117 118 .filter(or_(
118 119 # generate multiple IN to fix limitation problems
119 120 *in_filter_generator(Repository.repo_id, allowed_ids)
120 121 ))
121 122
122 123 if repo_type:
123 124 query = query.filter(Repository.repo_type == repo_type)
124 125
125 126 if name_contains:
126 127 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
127 128 query = query.filter(
128 129 Repository.repo_name.ilike(ilike_expression))
129 130 query = query.limit(limit)
130 131
131 132 acl_iter = query
132 133
133 134 return [
134 135 {
135 136 'id': obj.repo_name,
136 137 'value': org_query,
137 138 'value_display': obj.repo_name,
138 139 'text': obj.repo_name,
139 140 'type': 'repo',
140 141 'repo_id': obj.repo_id,
141 142 'repo_type': obj.repo_type,
142 143 'private': obj.private,
143 144 'url': h.route_path('repo_summary', repo_name=obj.repo_name)
144 145 }
145 146 for obj in acl_iter]
146 147
147 148 def _get_repo_group_list(self, name_contains=None, limit=20):
148 149 org_query = name_contains
149 150 allowed_ids = self._rhodecode_user.repo_group_acl_ids(
150 151 ['group.read', 'group.write', 'group.admin'],
151 152 cache=False, name_filter=name_contains) or [-1]
152 153
153 154 query = RepoGroup.query()\
154 155 .order_by(func.length(RepoGroup.group_name))\
155 156 .order_by(RepoGroup.group_name) \
156 157 .filter(or_(
157 158 # generate multiple IN to fix limitation problems
158 159 *in_filter_generator(RepoGroup.group_id, allowed_ids)
159 160 ))
160 161
161 162 if name_contains:
162 163 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
163 164 query = query.filter(
164 165 RepoGroup.group_name.ilike(ilike_expression))
165 166 query = query.limit(limit)
166 167
167 168 acl_iter = query
168 169
169 170 return [
170 171 {
171 172 'id': obj.group_name,
172 173 'value': org_query,
173 174 'value_display': obj.group_name,
174 175 'type': 'repo_group',
175 176 'url': h.route_path(
176 177 'repo_group_home', repo_group_name=obj.group_name)
177 178 }
178 179 for obj in acl_iter]
179 180
180 181 def _get_user_list(self, name_contains=None, limit=20):
181 182 org_query = name_contains
182 183 if not name_contains:
183 184 return []
184 185
185 186 name_contains = re.compile('(?:user:)(.+)').findall(name_contains)
186 187 if len(name_contains) != 1:
187 188 return []
188 189 name_contains = name_contains[0]
189 190
190 191 query = User.query()\
191 192 .order_by(func.length(User.username))\
192 193 .order_by(User.username) \
193 194 .filter(User.username != User.DEFAULT_USER)
194 195
195 196 if name_contains:
196 197 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
197 198 query = query.filter(
198 199 User.username.ilike(ilike_expression))
199 200 query = query.limit(limit)
200 201
201 202 acl_iter = query
202 203
203 204 return [
204 205 {
205 206 'id': obj.user_id,
206 207 'value': org_query,
207 208 'value_display': obj.username,
208 209 'type': 'user',
209 210 'icon_link': h.gravatar_url(obj.email, 30),
210 211 'url': h.route_path(
211 212 'user_profile', username=obj.username)
212 213 }
213 214 for obj in acl_iter]
214 215
215 216 def _get_user_groups_list(self, name_contains=None, limit=20):
216 217 org_query = name_contains
217 218 if not name_contains:
218 219 return []
219 220
220 221 name_contains = re.compile('(?:user_group:)(.+)').findall(name_contains)
221 222 if len(name_contains) != 1:
222 223 return []
223 224 name_contains = name_contains[0]
224 225
225 226 query = UserGroup.query()\
226 227 .order_by(func.length(UserGroup.users_group_name))\
227 228 .order_by(UserGroup.users_group_name)
228 229
229 230 if name_contains:
230 231 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
231 232 query = query.filter(
232 233 UserGroup.users_group_name.ilike(ilike_expression))
233 234 query = query.limit(limit)
234 235
235 236 acl_iter = query
236 237
237 238 return [
238 239 {
239 240 'id': obj.users_group_id,
240 241 'value': org_query,
241 242 'value_display': obj.users_group_name,
242 243 'type': 'user_group',
243 244 'url': h.route_path(
244 245 'user_group_profile', user_group_name=obj.users_group_name)
245 246 }
246 247 for obj in acl_iter]
247 248
248 249 def _get_hash_commit_list(self, auth_user, query):
249 250 org_query = query
250 251 if not query or len(query) < 3:
251 252 return []
252 253
253 254 commit_hashes = re.compile('(?:commit:)([0-9a-f]{2,40})').findall(query)
254 255
255 256 if len(commit_hashes) != 1:
256 257 return []
257 258 commit_hash = commit_hashes[0]
258 259
259 260 searcher = searcher_from_config(self.request.registry.settings)
260 261 result = searcher.search(
261 262 'commit_id:%s*' % commit_hash, 'commit', auth_user,
262 263 raise_on_exc=False)
263 264
264 265 return [
265 266 {
266 267 'id': entry['commit_id'],
267 268 'value': org_query,
268 269 'value_display': 'repo `{}` commit: {}'.format(
269 270 entry['repository'], entry['commit_id']),
270 271 'type': 'commit',
271 272 'repo': entry['repository'],
272 273 'url': h.route_path(
273 274 'repo_commit',
274 275 repo_name=entry['repository'], commit_id=entry['commit_id'])
275 276 }
276 277 for entry in result['results']]
277 278
278 279 @LoginRequired()
279 280 @view_config(
280 281 route_name='repo_list_data', request_method='GET',
281 282 renderer='json_ext', xhr=True)
282 283 def repo_list_data(self):
283 284 _ = self.request.translate
284 285 self.load_default_context()
285 286
286 287 query = self.request.GET.get('query')
287 288 repo_type = self.request.GET.get('repo_type')
288 289 log.debug('generating repo list, query:%s, repo_type:%s',
289 290 query, repo_type)
290 291
291 292 res = []
292 293 repos = self._get_repo_list(query, repo_type=repo_type)
293 294 if repos:
294 295 res.append({
295 296 'text': _('Repositories'),
296 297 'children': repos
297 298 })
298 299
299 300 data = {
300 301 'more': False,
301 302 'results': res
302 303 }
303 304 return data
304 305
305 306 @LoginRequired()
306 307 @view_config(
307 308 route_name='goto_switcher_data', request_method='GET',
308 309 renderer='json_ext', xhr=True)
309 310 def goto_switcher_data(self):
310 311 c = self.load_default_context()
311 312
312 313 _ = self.request.translate
313 314
314 315 query = self.request.GET.get('query')
315 316 log.debug('generating main filter data, query %s', query)
316 317
317 318 default_search_val = u'Full text search for: `{}`'.format(query)
318 319 res = []
319 320 if not query:
320 321 return {'suggestions': res}
321 322
322 323 res.append({
323 324 'id': -1,
324 325 'value': query,
325 326 'value_display': default_search_val,
326 327 'type': 'search',
327 328 'url': h.route_path(
328 329 'search', _query={'q': query})
329 330 })
330 331 repo_group_id = safe_int(self.request.GET.get('repo_group_id'))
331 332 if repo_group_id:
332 333 repo_group = RepoGroup.get(repo_group_id)
333 334 composed_hint = '{}/{}'.format(repo_group.group_name, query)
334 335 show_hint = not query.startswith(repo_group.group_name)
335 336 if repo_group and show_hint:
336 337 hint = u'Group search: `{}`'.format(composed_hint)
337 338 res.append({
338 339 'id': -1,
339 340 'value': composed_hint,
340 341 'value_display': hint,
341 342 'type': 'hint',
342 343 'url': ""
343 344 })
344 345
345 346 repo_groups = self._get_repo_group_list(query)
346 347 for serialized_repo_group in repo_groups:
347 348 res.append(serialized_repo_group)
348 349
349 350 repos = self._get_repo_list(query)
350 351 for serialized_repo in repos:
351 352 res.append(serialized_repo)
352 353
353 354 # TODO(marcink): permissions for that ?
354 355 allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
355 356 if allowed_user_search:
356 357 users = self._get_user_list(query)
357 358 for serialized_user in users:
358 359 res.append(serialized_user)
359 360
360 361 user_groups = self._get_user_groups_list(query)
361 362 for serialized_user_group in user_groups:
362 363 res.append(serialized_user_group)
363 364
364 365 commits = self._get_hash_commit_list(c.auth_user, query)
365 366 if commits:
366 367 unique_repos = collections.OrderedDict()
367 368 for commit in commits:
368 369 repo_name = commit['repo']
369 370 unique_repos.setdefault(repo_name, []).append(commit)
370 371
371 372 for repo, commits in unique_repos.items():
372 373 for commit in commits:
373 374 res.append(commit)
374 375
375 376 return {'suggestions': res}
376 377
377 378 def _get_groups_and_repos(self, repo_group_id=None):
378 379 # repo groups
379 380 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
380 381 _perms = ['group.read', 'group.write', 'group.admin']
381 382 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
382 383 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
383 384 repo_group_list=repo_group_list_acl, admin=False)
384 385
385 386 # repositories
386 387 repo_list = Repository.get_all_repos(group_id=repo_group_id)
387 388 _perms = ['repository.read', 'repository.write', 'repository.admin']
388 389 repo_list_acl = RepoList(repo_list, perm_set=_perms)
389 390 repo_data = RepoModel().get_repos_as_dict(
390 391 repo_list=repo_list_acl, admin=False)
391 392
392 393 return repo_data, repo_group_data
393 394
394 395 @LoginRequired()
395 396 @view_config(
396 397 route_name='home', request_method='GET',
397 398 renderer='rhodecode:templates/index.mako')
398 399 def main_page(self):
399 400 c = self.load_default_context()
400 401 c.repo_group = None
401 402
402 403 repo_data, repo_group_data = self._get_groups_and_repos()
403 404 # json used to render the grids
404 405 c.repos_data = json.dumps(repo_data)
405 406 c.repo_groups_data = json.dumps(repo_group_data)
406 407
407 408 return self._get_template_context(c)
408 409
409 410 @LoginRequired()
410 411 @HasRepoGroupPermissionAnyDecorator(
411 412 'group.read', 'group.write', 'group.admin')
412 413 @view_config(
413 414 route_name='repo_group_home', request_method='GET',
414 415 renderer='rhodecode:templates/index_repo_group.mako')
415 416 @view_config(
416 417 route_name='repo_group_home_slash', request_method='GET',
417 418 renderer='rhodecode:templates/index_repo_group.mako')
418 419 def repo_group_main_page(self):
419 420 c = self.load_default_context()
420 421 c.repo_group = self.request.db_repo_group
421 422 repo_data, repo_group_data = self._get_groups_and_repos(
422 423 c.repo_group.group_id)
423 424
424 425 # json used to render the grids
425 426 c.repos_data = json.dumps(repo_data)
426 427 c.repo_groups_data = json.dumps(repo_group_data)
427 428
428 429 return self._get_template_context(c)
429 430
430 431 @LoginRequired()
431 432 @CSRFRequired()
432 433 @view_config(
433 434 route_name='markup_preview', request_method='POST',
434 435 renderer='string', xhr=True)
435 436 def markup_preview(self):
436 437 # Technically a CSRF token is not needed as no state changes with this
437 438 # call. However, as this is a POST, it is better to have it, so automated
438 439 # tools don't flag it as potential CSRF.
439 440 # Post is required because the payload could be bigger than the maximum
440 441 # allowed by GET.
441 442
442 443 text = self.request.POST.get('text')
443 444 renderer = self.request.POST.get('renderer') or 'rst'
444 445 if text:
445 446 return h.render(text, renderer=renderer, mentions=True)
446 447 return ''
447 448
448 449 @LoginRequired()
449 450 @CSRFRequired()
450 451 @view_config(
451 452 route_name='store_user_session_value', request_method='POST',
452 453 renderer='string', xhr=True)
453 454 def store_user_session_attr(self):
454 455 key = self.request.POST.get('key')
455 456 val = self.request.POST.get('val')
456 457
457 458 existing_value = self.request.session.get(key)
458 459 if existing_value != val:
459 460 self.request.session[key] = val
460 461
461 462 return 'stored:{}'.format(key)
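
A note on the new .filter(Repository.archived.isnot(true())) clause in _get_repo_list() above: because the column is nullable, repositories that existed before this migration carry archived = NULL. IS NOT TRUE keeps both FALSE and NULL rows, whereas a plain equality test would silently hide the pre-existing ones. A minimal, self-contained SQLAlchemy sketch (the Repo model here is an illustrative stand-in, not RhodeCode's):

from sqlalchemy import Boolean, Column, Integer, false, true
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Repo(Base):
    """Minimal stand-in for the Repository model."""
    __tablename__ = 'repositories'
    repo_id = Column(Integer, primary_key=True)
    archived = Column(Boolean, nullable=True)  # NULL for pre-migration rows


# renders roughly as: repositories.archived IS NOT true   (keeps NULL rows)
print(Repo.archived.isnot(true()))

# renders roughly as: repositories.archived = false       (drops NULL rows)
print(Repo.archived == false())
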
@@ -1,476 +1,483 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 from rhodecode.apps._base import add_route_with_slash
21 21
22 22
23 23 def includeme(config):
24 24
25 25 # repo creating checks, special cases that aren't repo routes
26 26 config.add_route(
27 27 name='repo_creating',
28 28 pattern='/{repo_name:.*?[^/]}/repo_creating')
29 29
30 30 config.add_route(
31 31 name='repo_creating_check',
32 32 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
33 33
34 34 # Summary
35 35 # NOTE(marcink): one additional route is defined in very bottom, catch
36 36 # all pattern
37 37 config.add_route(
38 38 name='repo_summary_explicit',
39 39 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
40 40 config.add_route(
41 41 name='repo_summary_commits',
42 42 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
43 43
44 44 # Commits
45 45 config.add_route(
46 46 name='repo_commit',
47 47 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
48 48
49 49 config.add_route(
50 50 name='repo_commit_children',
51 51 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
52 52
53 53 config.add_route(
54 54 name='repo_commit_parents',
55 55 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
56 56
57 57 config.add_route(
58 58 name='repo_commit_raw',
59 59 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
60 60
61 61 config.add_route(
62 62 name='repo_commit_patch',
63 63 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
64 64
65 65 config.add_route(
66 66 name='repo_commit_download',
67 67 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
68 68
69 69 config.add_route(
70 70 name='repo_commit_data',
71 71 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
72 72
73 73 config.add_route(
74 74 name='repo_commit_comment_create',
75 75 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
76 76
77 77 config.add_route(
78 78 name='repo_commit_comment_preview',
79 79 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
80 80
81 81 config.add_route(
82 82 name='repo_commit_comment_delete',
83 83 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
84 84
85 85 # still working url for backward compat.
86 86 config.add_route(
87 87 name='repo_commit_raw_deprecated',
88 88 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
89 89
90 90 # Files
91 91 config.add_route(
92 92 name='repo_archivefile',
93 93 pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True)
94 94
95 95 config.add_route(
96 96 name='repo_files_diff',
97 97 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
98 98 config.add_route( # legacy route to make old links work
99 99 name='repo_files_diff_2way_redirect',
100 100 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
101 101
102 102 config.add_route(
103 103 name='repo_files',
104 104 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
105 105 config.add_route(
106 106 name='repo_files:default_path',
107 107 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
108 108 config.add_route(
109 109 name='repo_files:default_commit',
110 110 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
111 111
112 112 config.add_route(
113 113 name='repo_files:rendered',
114 114 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
115 115
116 116 config.add_route(
117 117 name='repo_files:annotated',
118 118 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
119 119 config.add_route(
120 120 name='repo_files:annotated_previous',
121 121 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
122 122
123 123 config.add_route(
124 124 name='repo_nodetree_full',
125 125 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
126 126 config.add_route(
127 127 name='repo_nodetree_full:default_path',
128 128 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
129 129
130 130 config.add_route(
131 131 name='repo_files_nodelist',
132 132 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
133 133
134 134 config.add_route(
135 135 name='repo_file_raw',
136 136 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
137 137
138 138 config.add_route(
139 139 name='repo_file_download',
140 140 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
141 141 config.add_route( # backward compat to keep old links working
142 142 name='repo_file_download:legacy',
143 143 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
144 144 repo_route=True)
145 145
146 146 config.add_route(
147 147 name='repo_file_history',
148 148 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
149 149
150 150 config.add_route(
151 151 name='repo_file_authors',
152 152 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
153 153
154 154 config.add_route(
155 155 name='repo_files_remove_file',
156 156 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
157 157 repo_route=True)
158 158 config.add_route(
159 159 name='repo_files_delete_file',
160 160 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
161 161 repo_route=True)
162 162 config.add_route(
163 163 name='repo_files_edit_file',
164 164 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
165 165 repo_route=True)
166 166 config.add_route(
167 167 name='repo_files_update_file',
168 168 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
169 169 repo_route=True)
170 170 config.add_route(
171 171 name='repo_files_add_file',
172 172 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
173 173 repo_route=True)
174 174 config.add_route(
175 175 name='repo_files_create_file',
176 176 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
177 177 repo_route=True)
178 178
179 179 # Refs data
180 180 config.add_route(
181 181 name='repo_refs_data',
182 182 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
183 183
184 184 config.add_route(
185 185 name='repo_refs_changelog_data',
186 186 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
187 187
188 188 config.add_route(
189 189 name='repo_stats',
190 190 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
191 191
192 192 # Changelog
193 193 config.add_route(
194 194 name='repo_changelog',
195 195 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
196 196 config.add_route(
197 197 name='repo_changelog_file',
198 198 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
199 199 config.add_route(
200 200 name='repo_changelog_elements',
201 201 pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True)
202 202 config.add_route(
203 203 name='repo_changelog_elements_file',
204 204 pattern='/{repo_name:.*?[^/]}/changelog_elements/{commit_id}/{f_path:.*}', repo_route=True)
205 205
206 206 # Compare
207 207 config.add_route(
208 208 name='repo_compare_select',
209 209 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
210 210
211 211 config.add_route(
212 212 name='repo_compare',
213 213 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
214 214
215 215 # Tags
216 216 config.add_route(
217 217 name='tags_home',
218 218 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
219 219
220 220 # Branches
221 221 config.add_route(
222 222 name='branches_home',
223 223 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
224 224
225 225 # Bookmarks
226 226 config.add_route(
227 227 name='bookmarks_home',
228 228 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
229 229
230 230 # Forks
231 231 config.add_route(
232 232 name='repo_fork_new',
233 233 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
234 repo_forbid_when_archived=True,
234 235 repo_accepted_types=['hg', 'git'])
235 236
236 237 config.add_route(
237 238 name='repo_fork_create',
238 239 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
240 repo_forbid_when_archived=True,
239 241 repo_accepted_types=['hg', 'git'])
240 242
241 243 config.add_route(
242 244 name='repo_forks_show_all',
243 245 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
244 246 repo_accepted_types=['hg', 'git'])
245 247 config.add_route(
246 248 name='repo_forks_data',
247 249 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
248 250 repo_accepted_types=['hg', 'git'])
249 251
250 252 # Pull Requests
251 253 config.add_route(
252 254 name='pullrequest_show',
253 255 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
254 256 repo_route=True)
255 257
256 258 config.add_route(
257 259 name='pullrequest_show_all',
258 260 pattern='/{repo_name:.*?[^/]}/pull-request',
259 261 repo_route=True, repo_accepted_types=['hg', 'git'])
260 262
261 263 config.add_route(
262 264 name='pullrequest_show_all_data',
263 265 pattern='/{repo_name:.*?[^/]}/pull-request-data',
264 266 repo_route=True, repo_accepted_types=['hg', 'git'])
265 267
266 268 config.add_route(
267 269 name='pullrequest_repo_refs',
268 270 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
269 271 repo_route=True)
270 272
271 273 config.add_route(
272 274 name='pullrequest_repo_destinations',
273 275 pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations',
274 276 repo_route=True)
275 277
276 278 config.add_route(
277 279 name='pullrequest_new',
278 280 pattern='/{repo_name:.*?[^/]}/pull-request/new',
279 repo_route=True, repo_accepted_types=['hg', 'git'])
281 repo_route=True, repo_accepted_types=['hg', 'git'],
282 repo_forbid_when_archived=True)
280 283
281 284 config.add_route(
282 285 name='pullrequest_create',
283 286 pattern='/{repo_name:.*?[^/]}/pull-request/create',
284 repo_route=True, repo_accepted_types=['hg', 'git'])
287 repo_route=True, repo_accepted_types=['hg', 'git'],
288 repo_forbid_when_archived=True)
285 289
286 290 config.add_route(
287 291 name='pullrequest_update',
288 292 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
289 repo_route=True)
293 repo_route=True, repo_forbid_when_archived=True)
290 294
291 295 config.add_route(
292 296 name='pullrequest_merge',
293 297 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
294 repo_route=True)
298 repo_route=True, repo_forbid_when_archived=True)
295 299
296 300 config.add_route(
297 301 name='pullrequest_delete',
298 302 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
299 repo_route=True)
303 repo_route=True, repo_forbid_when_archived=True)
300 304
301 305 config.add_route(
302 306 name='pullrequest_comment_create',
303 307 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
304 308 repo_route=True)
305 309
306 310 config.add_route(
307 311 name='pullrequest_comment_delete',
308 312 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
309 313 repo_route=True, repo_accepted_types=['hg', 'git'])
310 314
311 315 # Settings
312 316 config.add_route(
313 317 name='edit_repo',
314 318 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
315 319 # update is POST on edit_repo
316 320
317 321 # Settings advanced
318 322 config.add_route(
319 323 name='edit_repo_advanced',
320 324 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
321 325 config.add_route(
326 name='edit_repo_advanced_archive',
327 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
328 config.add_route(
322 329 name='edit_repo_advanced_delete',
323 330 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
324 331 config.add_route(
325 332 name='edit_repo_advanced_locking',
326 333 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
327 334 config.add_route(
328 335 name='edit_repo_advanced_journal',
329 336 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
330 337 config.add_route(
331 338 name='edit_repo_advanced_fork',
332 339 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
333 340
334 341 config.add_route(
335 342 name='edit_repo_advanced_hooks',
336 343 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
337 344
338 345 # Caches
339 346 config.add_route(
340 347 name='edit_repo_caches',
341 348 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
342 349
343 350 # Permissions
344 351 config.add_route(
345 352 name='edit_repo_perms',
346 353 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
347 354
348 355 # Permissions Branch (EE feature)
349 356 config.add_route(
350 357 name='edit_repo_perms_branch',
351 358 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
352 359 config.add_route(
353 360 name='edit_repo_perms_branch_delete',
354 361 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
355 362 repo_route=True)
356 363
357 364 # Maintenance
358 365 config.add_route(
359 366 name='edit_repo_maintenance',
360 367 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
361 368
362 369 config.add_route(
363 370 name='edit_repo_maintenance_execute',
364 371 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
365 372
366 373 # Fields
367 374 config.add_route(
368 375 name='edit_repo_fields',
369 376 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
370 377 config.add_route(
371 378 name='edit_repo_fields_create',
372 379 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
373 380 config.add_route(
374 381 name='edit_repo_fields_delete',
375 382 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
376 383
377 384 # Locking
378 385 config.add_route(
379 386 name='repo_edit_toggle_locking',
380 387 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
381 388
382 389 # Remote
383 390 config.add_route(
384 391 name='edit_repo_remote',
385 392 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
386 393 config.add_route(
387 394 name='edit_repo_remote_pull',
388 395 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
389 396 config.add_route(
390 397 name='edit_repo_remote_push',
391 398 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
392 399
393 400 # Statistics
394 401 config.add_route(
395 402 name='edit_repo_statistics',
396 403 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
397 404 config.add_route(
398 405 name='edit_repo_statistics_reset',
399 406 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
400 407
401 408 # Issue trackers
402 409 config.add_route(
403 410 name='edit_repo_issuetracker',
404 411 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
405 412 config.add_route(
406 413 name='edit_repo_issuetracker_test',
407 414 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
408 415 config.add_route(
409 416 name='edit_repo_issuetracker_delete',
410 417 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
411 418 config.add_route(
412 419 name='edit_repo_issuetracker_update',
413 420 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
414 421
415 422 # VCS Settings
416 423 config.add_route(
417 424 name='edit_repo_vcs',
418 425 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
419 426 config.add_route(
420 427 name='edit_repo_vcs_update',
421 428 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
422 429
423 430 # svn pattern
424 431 config.add_route(
425 432 name='edit_repo_vcs_svn_pattern_delete',
426 433 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
427 434
428 435 # Repo Review Rules (EE feature)
429 436 config.add_route(
430 437 name='repo_reviewers',
431 438 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
432 439
433 440 config.add_route(
434 441 name='repo_default_reviewers_data',
435 442 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
436 443
437 444 # Repo Automation (EE feature)
438 445 config.add_route(
439 446 name='repo_automation',
440 447 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
441 448
442 449 # Strip
443 450 config.add_route(
444 451 name='edit_repo_strip',
445 452 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
446 453
447 454 config.add_route(
448 455 name='strip_check',
449 456 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
450 457
451 458 config.add_route(
452 459 name='strip_execute',
453 460 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
454 461
455 462 # Audit logs
456 463 config.add_route(
457 464 name='edit_repo_audit_logs',
458 465 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
459 466
460 467 # ATOM/RSS Feed
461 468 config.add_route(
462 469 name='rss_feed_home',
463 470 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
464 471
465 472 config.add_route(
466 473 name='atom_feed_home',
467 474 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
468 475
469 476 # NOTE(marcink): needs to be at the end for catch-all
470 477 add_route_with_slash(
471 478 config,
472 479 name='repo_summary',
473 480 pattern='/{repo_name:.*?[^/]}', repo_route=True)
474 481
475 482 # Scan module for configuration decorators.
476 483 config.scan('.views', ignore='.tests')
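
Note on enforcement: the routes above that mutate repository state (forking, pull request new/create/update/merge/delete, and the new settings/advanced/archive endpoint) are registered with repo_forbid_when_archived=True, while read-only routes such as repo_forks_show_all are left without the flag. The guard itself is not shown in this hunk; the fork tests below only pin its observable behaviour (HTTP 302 plus the flash message 'Action not supported for archived repository.'). A minimal sketch of such a guard, assuming a hypothetical helper name and using only standard Pyramid calls (illustrative only, not the changeset's actual implementation):

    from pyramid.httpexceptions import HTTPFound

    def forbid_if_archived(request, db_repo):
        # Hypothetical guard: a view whose route was registered with
        # repo_forbid_when_archived=True could call this before doing work.
        if db_repo.archived:
            request.session.flash(
                u'Action not supported for archived repository.')
            raise HTTPFound(location=request.route_path(
                'repo_summary', repo_name=db_repo.repo_name))
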
@@ -1,315 +1,336 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK
24 24
25 25 from rhodecode.tests.fixture import Fixture
26 26 from rhodecode.lib import helpers as h
27 27
28 28 from rhodecode.model.db import Repository
29 29 from rhodecode.model.repo import RepoModel
30 30 from rhodecode.model.user import UserModel
31 31 from rhodecode.model.meta import Session
32 32
33 33 fixture = Fixture()
34 34
35 35
36 36 def route_path(name, params=None, **kwargs):
37 37 import urllib
38 38
39 39 base_url = {
40 40 'repo_summary': '/{repo_name}',
41 41 'repo_creating_check': '/{repo_name}/repo_creating_check',
42 42 'repo_fork_new': '/{repo_name}/fork',
43 43 'repo_fork_create': '/{repo_name}/fork/create',
44 44 'repo_forks_show_all': '/{repo_name}/forks',
45 45 'repo_forks_data': '/{repo_name}/forks/data',
46 46 }[name].format(**kwargs)
47 47
48 48 if params:
49 49 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
50 50 return base_url
51 51
52 52
53 53 FORK_NAME = {
54 54 'hg': HG_FORK,
55 55 'git': GIT_FORK
56 56 }
57 57
58 58
59 59 @pytest.mark.skip_backends('svn')
60 60 class TestRepoForkViewTests(TestController):
61 61
62 62 def test_show_forks(self, backend, xhr_header):
63 63 self.log_user()
64 64 response = self.app.get(
65 65 route_path('repo_forks_data', repo_name=backend.repo_name),
66 66 extra_environ=xhr_header)
67 67
68 68 assert response.json == {u'data': [], u'draw': None,
69 69 u'recordsFiltered': 0, u'recordsTotal': 0}
70 70
71 71 def test_no_permissions_to_fork_page(self, backend, user_util):
72 72 user = user_util.create_user(password='qweqwe')
73 73 user_id = user.user_id
74 74 self.log_user(user.username, 'qweqwe')
75 75
76 76 user_model = UserModel()
77 77 user_model.revoke_perm(user_id, 'hg.fork.repository')
78 78 user_model.grant_perm(user_id, 'hg.fork.none')
79 79 u = UserModel().get(user_id)
80 80 u.inherit_default_permissions = False
81 81 Session().commit()
82 82 # try to create a fork
83 83 self.app.get(
84 84 route_path('repo_fork_new', repo_name=backend.repo_name),
85 85 status=404)
86 86
87 87 def test_no_permissions_to_fork_submit(self, backend, csrf_token, user_util):
88 88 user = user_util.create_user(password='qweqwe')
89 89 user_id = user.user_id
90 90 self.log_user(user.username, 'qweqwe')
91 91
92 92 user_model = UserModel()
93 93 user_model.revoke_perm(user_id, 'hg.fork.repository')
94 94 user_model.grant_perm(user_id, 'hg.fork.none')
95 95 u = UserModel().get(user_id)
96 96 u.inherit_default_permissions = False
97 97 Session().commit()
98 98 # try to create a fork
99 99 self.app.post(
100 100 route_path('repo_fork_create', repo_name=backend.repo_name),
101 101 {'csrf_token': csrf_token},
102 102 status=404)
103 103
104 104 def test_fork_missing_data(self, autologin_user, backend, csrf_token):
105 105 # try to create a fork
106 106 response = self.app.post(
107 107 route_path('repo_fork_create', repo_name=backend.repo_name),
108 108 {'csrf_token': csrf_token},
109 109 status=200)
110 110 # test if html fill works fine
111 111 response.mustcontain('Missing value')
112 112
113 113 def test_create_fork_page(self, autologin_user, backend):
114 114 self.app.get(
115 115 route_path('repo_fork_new', repo_name=backend.repo_name),
116 116 status=200)
117 117
118 118 def test_create_and_show_fork(
119 119 self, autologin_user, backend, csrf_token, xhr_header):
120 120
121 121 # create a fork
122 122 fork_name = FORK_NAME[backend.alias]
123 123 description = 'fork of vcs test'
124 124 repo_name = backend.repo_name
125 125 source_repo = Repository.get_by_repo_name(repo_name)
126 126 creation_args = {
127 127 'repo_name': fork_name,
128 128 'repo_group': '',
129 129 'fork_parent_id': source_repo.repo_id,
130 130 'repo_type': backend.alias,
131 131 'description': description,
132 132 'private': 'False',
133 133 'landing_rev': 'rev:tip',
134 134 'csrf_token': csrf_token,
135 135 }
136 136
137 137 self.app.post(
138 138 route_path('repo_fork_create', repo_name=repo_name), creation_args)
139 139
140 140 response = self.app.get(
141 141 route_path('repo_forks_data', repo_name=repo_name),
142 142 extra_environ=xhr_header)
143 143
144 144 assert response.json['data'][0]['fork_name'] == \
145 145 """<a href="/%s">%s</a>""" % (fork_name, fork_name)
146 146
147 147 # remove this fork
148 148 fixture.destroy_repo(fork_name)
149 149
150 150 def test_fork_create(self, autologin_user, backend, csrf_token):
151 151 fork_name = FORK_NAME[backend.alias]
152 152 description = 'fork of vcs test'
153 153 repo_name = backend.repo_name
154 154 source_repo = Repository.get_by_repo_name(repo_name)
155 155 creation_args = {
156 156 'repo_name': fork_name,
157 157 'repo_group': '',
158 158 'fork_parent_id': source_repo.repo_id,
159 159 'repo_type': backend.alias,
160 160 'description': description,
161 161 'private': 'False',
162 162 'landing_rev': 'rev:tip',
163 163 'csrf_token': csrf_token,
164 164 }
165 165 self.app.post(
166 166 route_path('repo_fork_create', repo_name=repo_name), creation_args)
167 167 repo = Repository.get_by_repo_name(FORK_NAME[backend.alias])
168 168 assert repo.fork.repo_name == backend.repo_name
169 169
170 170 # run the check page that triggers the flash message
171 171 response = self.app.get(
172 172 route_path('repo_creating_check', repo_name=fork_name))
173 173 # test if we have a message that fork is ok
174 174 assert_session_flash(response,
175 175 'Forked repository %s as <a href="/%s">%s</a>'
176 176 % (repo_name, fork_name, fork_name))
177 177
178 178 # test if the fork was created in the database
179 179 fork_repo = Session().query(Repository)\
180 180 .filter(Repository.repo_name == fork_name).one()
181 181
182 182 assert fork_repo.repo_name == fork_name
183 183 assert fork_repo.fork.repo_name == repo_name
184 184
185 185 # test if the repository is visible in the list?
186 186 response = self.app.get(
187 187 h.route_path('repo_summary', repo_name=fork_name))
188 188 response.mustcontain(fork_name)
189 189 response.mustcontain(backend.alias)
190 190 response.mustcontain('Fork of')
191 191 response.mustcontain('<a href="/%s">%s</a>' % (repo_name, repo_name))
192 192
193 193 def test_fork_create_into_group(self, autologin_user, backend, csrf_token):
194 194 group = fixture.create_repo_group('vc')
195 195 group_id = group.group_id
196 196 fork_name = FORK_NAME[backend.alias]
197 197 fork_name_full = 'vc/%s' % fork_name
198 198 description = 'fork of vcs test'
199 199 repo_name = backend.repo_name
200 200 source_repo = Repository.get_by_repo_name(repo_name)
201 201 creation_args = {
202 202 'repo_name': fork_name,
203 203 'repo_group': group_id,
204 204 'fork_parent_id': source_repo.repo_id,
205 205 'repo_type': backend.alias,
206 206 'description': description,
207 207 'private': 'False',
208 208 'landing_rev': 'rev:tip',
209 209 'csrf_token': csrf_token,
210 210 }
211 211 self.app.post(
212 212 route_path('repo_fork_create', repo_name=repo_name), creation_args)
213 213 repo = Repository.get_by_repo_name(fork_name_full)
214 214 assert repo.fork.repo_name == backend.repo_name
215 215
216 216 # run the check page that triggers the flash message
217 217 response = self.app.get(
218 218 route_path('repo_creating_check', repo_name=fork_name_full))
219 219 # test if we have a message that fork is ok
220 220 assert_session_flash(response,
221 221 'Forked repository %s as <a href="/%s">%s</a>'
222 222 % (repo_name, fork_name_full, fork_name_full))
223 223
224 224 # test if the fork was created in the database
225 225 fork_repo = Session().query(Repository)\
226 226 .filter(Repository.repo_name == fork_name_full).one()
227 227
228 228 assert fork_repo.repo_name == fork_name_full
229 229 assert fork_repo.fork.repo_name == repo_name
230 230
231 231 # test if the repository is visible in the list?
232 232 response = self.app.get(
233 233 h.route_path('repo_summary', repo_name=fork_name_full))
234 234 response.mustcontain(fork_name_full)
235 235 response.mustcontain(backend.alias)
236 236
237 237 response.mustcontain('Fork of')
238 238 response.mustcontain('<a href="/%s">%s</a>' % (repo_name, repo_name))
239 239
240 240 fixture.destroy_repo(fork_name_full)
241 241 fixture.destroy_repo_group(group_id)
242 242
243 243 def test_fork_read_permission(self, backend, xhr_header, user_util):
244 244 user = user_util.create_user(password='qweqwe')
245 245 user_id = user.user_id
246 246 self.log_user(user.username, 'qweqwe')
247 247
248 248 # create a fake fork
249 249 fork = user_util.create_repo(repo_type=backend.alias)
250 250 source = user_util.create_repo(repo_type=backend.alias)
251 251 repo_name = source.repo_name
252 252
253 253 fork.fork_id = source.repo_id
254 254 fork_name = fork.repo_name
255 255 Session().commit()
256 256
257 257 forks = Repository.query()\
258 258 .filter(Repository.repo_type == backend.alias)\
259 259 .filter(Repository.fork_id == source.repo_id).all()
260 260 assert 1 == len(forks)
261 261
262 262 # set read permissions for this
263 263 RepoModel().grant_user_permission(
264 264 repo=forks[0], user=user_id, perm='repository.read')
265 265 Session().commit()
266 266
267 267 response = self.app.get(
268 268 route_path('repo_forks_data', repo_name=repo_name),
269 269 extra_environ=xhr_header)
270 270
271 271 assert response.json['data'][0]['fork_name'] == \
272 272 """<a href="/%s">%s</a>""" % (fork_name, fork_name)
273 273
274 274 def test_fork_none_permission(self, backend, xhr_header, user_util):
275 275 user = user_util.create_user(password='qweqwe')
276 276 user_id = user.user_id
277 277 self.log_user(user.username, 'qweqwe')
278 278
279 279 # create a fake fork
280 280 fork = user_util.create_repo(repo_type=backend.alias)
281 281 source = user_util.create_repo(repo_type=backend.alias)
282 282 repo_name = source.repo_name
283 283
284 284 fork.fork_id = source.repo_id
285 285
286 286 Session().commit()
287 287
288 288 forks = Repository.query()\
289 289 .filter(Repository.repo_type == backend.alias)\
290 290 .filter(Repository.fork_id == source.repo_id).all()
291 291 assert 1 == len(forks)
292 292
293 293 # set none
294 294 RepoModel().grant_user_permission(
295 295 repo=forks[0], user=user_id, perm='repository.none')
296 296 Session().commit()
297 297
298 298 # fork shouldn't be there
299 299 response = self.app.get(
300 300 route_path('repo_forks_data', repo_name=repo_name),
301 301 extra_environ=xhr_header)
302 302
303 303 assert response.json == {u'data': [], u'draw': None,
304 304 u'recordsFiltered': 0, u'recordsTotal': 0}
305 305
306 @pytest.mark.parametrize('url_type', [
307 'repo_fork_new',
308 'repo_fork_create'
309 ])
310 def test_fork_is_forbidden_on_archived_repo(self, backend, xhr_header, user_util, url_type):
311 user = user_util.create_user(password='qweqwe')
312 self.log_user(user.username, 'qweqwe')
313
314 # create a temporary repo
315 source = user_util.create_repo(repo_type=backend.alias)
316 repo_name = source.repo_name
317 repo = Repository.get_by_repo_name(repo_name)
318 repo.archived = True
319 Session().commit()
320
321 response = self.app.get(
322 route_path(url_type, repo_name=repo_name), status=302)
323
324 msg = 'Action not supported for archived repository.'
325 assert_session_flash(response, msg)
326
306 327
307 328 class TestSVNFork(TestController):
308 329 @pytest.mark.parametrize('route_name', [
309 330 'repo_fork_create', 'repo_fork_new'
310 331 ])
311 332 def test_fork_redirects(self, autologin_user, backend_svn, route_name):
312 333
313 334 self.app.get(route_path(
314 335 route_name, repo_name=backend_svn.repo_name),
315 336 status=404)
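
Note: the archived-repository fork test above flips Repository.archived directly on the model and expects the flagged routes to redirect. A hedged sketch of toggling and querying that same attribute through the model layer (the helper names are illustrative only; the archived attribute is the one the test above sets):

    from rhodecode.model.db import Repository
    from rhodecode.model.meta import Session

    def archive_repo(repo_name):
        # Illustrative helper: mark a repository archived, exactly as the
        # test does, and persist the change.
        repo = Repository.get_by_repo_name(repo_name)
        repo.archived = True
        Session().add(repo)
        Session().commit()
        return repo

    def get_archived_repos():
        # Illustrative helper: list repositories with the archived flag set.
        return Session().query(Repository).filter(
            Repository.archived == True).all()  # noqa: E712
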
@@ -1,1206 +1,1228 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for a regular user without merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now the regular user has merge permissions, we have the CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_title', 'Title'),
372 372 ('pullrequest_desc', 'Description'),
373 373 ('description_renderer', 'markdown'),
374 374 ('__start__', 'review_members:sequence'),
375 375 ('__start__', 'reviewer:mapping'),
376 376 ('user_id', '1'),
377 377 ('__start__', 'reasons:sequence'),
378 378 ('reason', 'Some reason'),
379 379 ('__end__', 'reasons:sequence'),
380 380 ('__start__', 'rules:sequence'),
381 381 ('__end__', 'rules:sequence'),
382 382 ('mandatory', 'False'),
383 383 ('__end__', 'reviewer:mapping'),
384 384 ('__end__', 'review_members:sequence'),
385 385 ('__start__', 'revisions:sequence'),
386 386 ('revisions', commit_ids['change']),
387 387 ('revisions', commit_ids['change2']),
388 388 ('__end__', 'revisions:sequence'),
389 389 ('user', ''),
390 390 ('csrf_token', csrf_token),
391 391 ],
392 392 status=302)
393 393
394 394 location = response.headers['Location']
395 395 pull_request_id = location.rsplit('/', 1)[1]
396 396 assert pull_request_id != 'new'
397 397 pull_request = PullRequest.get(int(pull_request_id))
398 398
399 399 # check that we now have both revisions
400 400 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
401 401 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
402 402 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
403 403 assert pull_request.target_ref == expected_target_ref
404 404
405 405 def test_reviewer_notifications(self, backend, csrf_token):
406 406 # We have to use the app.post for this test so it will create the
407 407 # notifications properly with the new PR
408 408 commits = [
409 409 {'message': 'ancestor',
410 410 'added': [FileNode('file_A', content='content_of_ancestor')]},
411 411 {'message': 'change',
412 412 'added': [FileNode('file_a', content='content_of_change')]},
413 413 {'message': 'change-child'},
414 414 {'message': 'ancestor-child', 'parents': ['ancestor'],
415 415 'added': [
416 416 FileNode('file_B', content='content_of_ancestor_child')]},
417 417 {'message': 'ancestor-child-2'},
418 418 ]
419 419 commit_ids = backend.create_master_repo(commits)
420 420 target = backend.create_repo(heads=['ancestor-child'])
421 421 source = backend.create_repo(heads=['change'])
422 422
423 423 response = self.app.post(
424 424 route_path('pullrequest_create', repo_name=source.repo_name),
425 425 [
426 426 ('source_repo', source.repo_name),
427 427 ('source_ref', 'branch:default:' + commit_ids['change']),
428 428 ('target_repo', target.repo_name),
429 429 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
430 430 ('common_ancestor', commit_ids['ancestor']),
431 431 ('pullrequest_title', 'Title'),
432 432 ('pullrequest_desc', 'Description'),
433 433 ('description_renderer', 'markdown'),
434 434 ('__start__', 'review_members:sequence'),
435 435 ('__start__', 'reviewer:mapping'),
436 436 ('user_id', '2'),
437 437 ('__start__', 'reasons:sequence'),
438 438 ('reason', 'Some reason'),
439 439 ('__end__', 'reasons:sequence'),
440 440 ('__start__', 'rules:sequence'),
441 441 ('__end__', 'rules:sequence'),
442 442 ('mandatory', 'False'),
443 443 ('__end__', 'reviewer:mapping'),
444 444 ('__end__', 'review_members:sequence'),
445 445 ('__start__', 'revisions:sequence'),
446 446 ('revisions', commit_ids['change']),
447 447 ('__end__', 'revisions:sequence'),
448 448 ('user', ''),
449 449 ('csrf_token', csrf_token),
450 450 ],
451 451 status=302)
452 452
453 453 location = response.headers['Location']
454 454
455 455 pull_request_id = location.rsplit('/', 1)[1]
456 456 assert pull_request_id != 'new'
457 457 pull_request = PullRequest.get(int(pull_request_id))
458 458
459 459 # Check that a notification was made
460 460 notifications = Notification.query()\
461 461 .filter(Notification.created_by == pull_request.author.user_id,
462 462 Notification.type_ == Notification.TYPE_PULL_REQUEST,
463 463 Notification.subject.contains(
464 464 "wants you to review pull request #%s" % pull_request_id))
465 465 assert len(notifications.all()) == 1
466 466
467 467 # Change reviewers and check that a notification was made
468 468 PullRequestModel().update_reviewers(
469 469 pull_request.pull_request_id, [(1, [], False, [])],
470 470 pull_request.author)
471 471 assert len(notifications.all()) == 2
472 472
473 473 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
474 474 csrf_token):
475 475 commits = [
476 476 {'message': 'ancestor',
477 477 'added': [FileNode('file_A', content='content_of_ancestor')]},
478 478 {'message': 'change',
479 479 'added': [FileNode('file_a', content='content_of_change')]},
480 480 {'message': 'change-child'},
481 481 {'message': 'ancestor-child', 'parents': ['ancestor'],
482 482 'added': [
483 483 FileNode('file_B', content='content_of_ancestor_child')]},
484 484 {'message': 'ancestor-child-2'},
485 485 ]
486 486 commit_ids = backend.create_master_repo(commits)
487 487 target = backend.create_repo(heads=['ancestor-child'])
488 488 source = backend.create_repo(heads=['change'])
489 489
490 490 response = self.app.post(
491 491 route_path('pullrequest_create', repo_name=source.repo_name),
492 492 [
493 493 ('source_repo', source.repo_name),
494 494 ('source_ref', 'branch:default:' + commit_ids['change']),
495 495 ('target_repo', target.repo_name),
496 496 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
497 497 ('common_ancestor', commit_ids['ancestor']),
498 498 ('pullrequest_title', 'Title'),
499 499 ('pullrequest_desc', 'Description'),
500 500 ('description_renderer', 'markdown'),
501 501 ('__start__', 'review_members:sequence'),
502 502 ('__start__', 'reviewer:mapping'),
503 503 ('user_id', '1'),
504 504 ('__start__', 'reasons:sequence'),
505 505 ('reason', 'Some reason'),
506 506 ('__end__', 'reasons:sequence'),
507 507 ('__start__', 'rules:sequence'),
508 508 ('__end__', 'rules:sequence'),
509 509 ('mandatory', 'False'),
510 510 ('__end__', 'reviewer:mapping'),
511 511 ('__end__', 'review_members:sequence'),
512 512 ('__start__', 'revisions:sequence'),
513 513 ('revisions', commit_ids['change']),
514 514 ('__end__', 'revisions:sequence'),
515 515 ('user', ''),
516 516 ('csrf_token', csrf_token),
517 517 ],
518 518 status=302)
519 519
520 520 location = response.headers['Location']
521 521
522 522 pull_request_id = location.rsplit('/', 1)[1]
523 523 assert pull_request_id != 'new'
524 524 pull_request = PullRequest.get(int(pull_request_id))
525 525
526 526 # target_ref has to point to the ancestor's commit_id in order to
527 527 # show the correct diff
528 528 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
529 529 assert pull_request.target_ref == expected_target_ref
530 530
531 531 # Check generated diff contents
532 532 response = response.follow()
533 533 assert 'content_of_ancestor' not in response.body
534 534 assert 'content_of_ancestor-child' not in response.body
535 535 assert 'content_of_change' in response.body
536 536
537 537 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
538 538 # Clear any previous calls to rcextensions
539 539 rhodecode.EXTENSIONS.calls.clear()
540 540
541 541 pull_request = pr_util.create_pull_request(
542 542 approved=True, mergeable=True)
543 543 pull_request_id = pull_request.pull_request_id
544 544 repo_name = pull_request.target_repo.scm_instance().name,
545 545
546 546 response = self.app.post(
547 547 route_path('pullrequest_merge',
548 548 repo_name=str(repo_name[0]),
549 549 pull_request_id=pull_request_id),
550 550 params={'csrf_token': csrf_token}).follow()
551 551
552 552 pull_request = PullRequest.get(pull_request_id)
553 553
554 554 assert response.status_int == 200
555 555 assert pull_request.is_closed()
556 556 assert_pull_request_status(
557 557 pull_request, ChangesetStatus.STATUS_APPROVED)
558 558
559 559 # Check the relevant log entries were added
560 560 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
561 561 actions = [log.action for log in user_logs]
562 562 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
563 563 expected_actions = [
564 564 u'repo.pull_request.close',
565 565 u'repo.pull_request.merge',
566 566 u'repo.pull_request.comment.create'
567 567 ]
568 568 assert actions == expected_actions
569 569
570 570 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
571 571 actions = [log for log in user_logs]
572 572 assert actions[-1].action == 'user.push'
573 573 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
574 574
575 575 # Check post_push rcextension was really executed
576 576 push_calls = rhodecode.EXTENSIONS.calls['post_push']
577 577 assert len(push_calls) == 1
578 578 unused_last_call_args, last_call_kwargs = push_calls[0]
579 579 assert last_call_kwargs['action'] == 'push'
580 580 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
581 581
582 582 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
583 583 pull_request = pr_util.create_pull_request(mergeable=False)
584 584 pull_request_id = pull_request.pull_request_id
585 585 pull_request = PullRequest.get(pull_request_id)
586 586
587 587 response = self.app.post(
588 588 route_path('pullrequest_merge',
589 589 repo_name=pull_request.target_repo.scm_instance().name,
590 590 pull_request_id=pull_request.pull_request_id),
591 591 params={'csrf_token': csrf_token}).follow()
592 592
593 593 assert response.status_int == 200
594 594 response.mustcontain(
595 595 'Merge is not currently possible because of below failed checks.')
596 596 response.mustcontain('Server-side pull request merging is disabled.')
597 597
598 598 @pytest.mark.skip_backends('svn')
599 599 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
600 600 pull_request = pr_util.create_pull_request(mergeable=True)
601 601 pull_request_id = pull_request.pull_request_id
602 602 repo_name = pull_request.target_repo.scm_instance().name
603 603
604 604 response = self.app.post(
605 605 route_path('pullrequest_merge',
606 606 repo_name=repo_name,
607 607 pull_request_id=pull_request_id),
608 608 params={'csrf_token': csrf_token}).follow()
609 609
610 610 assert response.status_int == 200
611 611
612 612 response.mustcontain(
613 613 'Merge is not currently possible because of below failed checks.')
614 614 response.mustcontain('Pull request reviewer approval is pending.')
615 615
616 616 def test_merge_pull_request_renders_failure_reason(
617 617 self, user_regular, csrf_token, pr_util):
618 618 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
619 619 pull_request_id = pull_request.pull_request_id
620 620 repo_name = pull_request.target_repo.scm_instance().name
621 621
622 622 model_patcher = mock.patch.multiple(
623 623 PullRequestModel,
624 624 merge_repo=mock.Mock(return_value=MergeResponse(
625 625 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
626 626 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
627 627
628 628 with model_patcher:
629 629 response = self.app.post(
630 630 route_path('pullrequest_merge',
631 631 repo_name=repo_name,
632 632 pull_request_id=pull_request_id),
633 633 params={'csrf_token': csrf_token}, status=302)
634 634
635 635 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
636 636 MergeFailureReason.PUSH_FAILED])
637 637
638 638 def test_update_source_revision(self, backend, csrf_token):
639 639 commits = [
640 640 {'message': 'ancestor'},
641 641 {'message': 'change'},
642 642 {'message': 'change-2'},
643 643 ]
644 644 commit_ids = backend.create_master_repo(commits)
645 645 target = backend.create_repo(heads=['ancestor'])
646 646 source = backend.create_repo(heads=['change'])
647 647
648 648 # create pr from a in source to A in target
649 649 pull_request = PullRequest()
650 650 pull_request.source_repo = source
651 651 # TODO: johbo: Make sure that we write the source ref this way!
652 652 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
653 653 branch=backend.default_branch_name, commit_id=commit_ids['change'])
654 654 pull_request.target_repo = target
655 655
656 656 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
657 657 branch=backend.default_branch_name,
658 658 commit_id=commit_ids['ancestor'])
659 659 pull_request.revisions = [commit_ids['change']]
660 660 pull_request.title = u"Test"
661 661 pull_request.description = u"Description"
662 662 pull_request.author = UserModel().get_by_username(
663 663 TEST_USER_ADMIN_LOGIN)
664 664 Session().add(pull_request)
665 665 Session().commit()
666 666 pull_request_id = pull_request.pull_request_id
667 667
668 668 # source has ancestor - change - change-2
669 669 backend.pull_heads(source, heads=['change-2'])
670 670
671 671 # update PR
672 672 self.app.post(
673 673 route_path('pullrequest_update',
674 674 repo_name=target.repo_name,
675 675 pull_request_id=pull_request_id),
676 676 params={'update_commits': 'true',
677 677 'csrf_token': csrf_token})
678 678
679 679 # check that we now have both revisions
680 680 pull_request = PullRequest.get(pull_request_id)
681 681 assert pull_request.revisions == [
682 682 commit_ids['change-2'], commit_ids['change']]
683 683
684 684 # TODO: johbo: this should be a test on its own
685 685 response = self.app.get(route_path(
686 686 'pullrequest_new',
687 687 repo_name=target.repo_name))
688 688 assert response.status_int == 200
689 689 assert 'Pull request updated to' in response.body
690 690 assert 'with 1 added, 0 removed commits.' in response.body
691 691
692 692 def test_update_target_revision(self, backend, csrf_token):
693 693 commits = [
694 694 {'message': 'ancestor'},
695 695 {'message': 'change'},
696 696 {'message': 'ancestor-new', 'parents': ['ancestor']},
697 697 {'message': 'change-rebased'},
698 698 ]
699 699 commit_ids = backend.create_master_repo(commits)
700 700 target = backend.create_repo(heads=['ancestor'])
701 701 source = backend.create_repo(heads=['change'])
702 702
703 703 # create pr from a in source to A in target
704 704 pull_request = PullRequest()
705 705 pull_request.source_repo = source
706 706 # TODO: johbo: Make sure that we write the source ref this way!
707 707 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
708 708 branch=backend.default_branch_name, commit_id=commit_ids['change'])
709 709 pull_request.target_repo = target
710 710 # TODO: johbo: Target ref should be branch based, since tip can jump
711 711 # from branch to branch
712 712 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
713 713 branch=backend.default_branch_name,
714 714 commit_id=commit_ids['ancestor'])
715 715 pull_request.revisions = [commit_ids['change']]
716 716 pull_request.title = u"Test"
717 717 pull_request.description = u"Description"
718 718 pull_request.author = UserModel().get_by_username(
719 719 TEST_USER_ADMIN_LOGIN)
720 720 Session().add(pull_request)
721 721 Session().commit()
722 722 pull_request_id = pull_request.pull_request_id
723 723
724 724 # target has ancestor - ancestor-new
725 725 # source has ancestor - ancestor-new - change-rebased
726 726 backend.pull_heads(target, heads=['ancestor-new'])
727 727 backend.pull_heads(source, heads=['change-rebased'])
728 728
729 729 # update PR
730 730 self.app.post(
731 731 route_path('pullrequest_update',
732 732 repo_name=target.repo_name,
733 733 pull_request_id=pull_request_id),
734 734 params={'update_commits': 'true',
735 735 'csrf_token': csrf_token},
736 736 status=200)
737 737
738 738 # check that we now have both revisions
739 739 pull_request = PullRequest.get(pull_request_id)
740 740 assert pull_request.revisions == [commit_ids['change-rebased']]
741 741 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
742 742 branch=backend.default_branch_name,
743 743 commit_id=commit_ids['ancestor-new'])
744 744
745 745 # TODO: johbo: This should be a test on its own
746 746 response = self.app.get(route_path(
747 747 'pullrequest_new',
748 748 repo_name=target.repo_name))
749 749 assert response.status_int == 200
750 750 assert 'Pull request updated to' in response.body
751 751 assert 'with 1 added, 1 removed commits.' in response.body
752 752
753 753 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
754 754 backend = backend_git
755 755 commits = [
756 756 {'message': 'master-commit-1'},
757 757 {'message': 'master-commit-2-change-1'},
758 758 {'message': 'master-commit-3-change-2'},
759 759
760 760 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
761 761 {'message': 'feat-commit-2'},
762 762 ]
763 763 commit_ids = backend.create_master_repo(commits)
764 764 target = backend.create_repo(heads=['master-commit-3-change-2'])
765 765 source = backend.create_repo(heads=['feat-commit-2'])
766 766
767 767 # create pr from a in source to A in target
768 768 pull_request = PullRequest()
769 769 pull_request.source_repo = source
770 770 # TODO: johbo: Make sure that we write the source ref this way!
771 771 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
772 772 branch=backend.default_branch_name,
773 773 commit_id=commit_ids['master-commit-3-change-2'])
774 774
775 775 pull_request.target_repo = target
776 776 # TODO: johbo: Target ref should be branch based, since tip can jump
777 777 # from branch to branch
778 778 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
779 779 branch=backend.default_branch_name,
780 780 commit_id=commit_ids['feat-commit-2'])
781 781
782 782 pull_request.revisions = [
783 783 commit_ids['feat-commit-1'],
784 784 commit_ids['feat-commit-2']
785 785 ]
786 786 pull_request.title = u"Test"
787 787 pull_request.description = u"Description"
788 788 pull_request.author = UserModel().get_by_username(
789 789 TEST_USER_ADMIN_LOGIN)
790 790 Session().add(pull_request)
791 791 Session().commit()
792 792 pull_request_id = pull_request.pull_request_id
793 793
794 794 # PR is created, now we simulate a force-push into target,
795 795 # that drops a 2 last commits
796 796 vcsrepo = target.scm_instance()
797 797 vcsrepo.config.clear_section('hooks')
798 798 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
799 799
800 800 # update PR
801 801 self.app.post(
802 802 route_path('pullrequest_update',
803 803 repo_name=target.repo_name,
804 804 pull_request_id=pull_request_id),
805 805 params={'update_commits': 'true',
806 806 'csrf_token': csrf_token},
807 807 status=200)
808 808
809 809 response = self.app.get(route_path(
810 810 'pullrequest_new',
811 811 repo_name=target.repo_name))
812 812 assert response.status_int == 200
813 813 response.mustcontain('Pull request updated to')
814 814 response.mustcontain('with 0 added, 0 removed commits.')
815 815
816 816 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 817 commits = [
818 818 {'message': 'ancestor'},
819 819 {'message': 'change'},
820 820 {'message': 'change-2'},
821 821 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 822 {'message': 'change-rebased'},
823 823 ]
824 824 commit_ids = backend.create_master_repo(commits)
825 825 target = backend.create_repo(heads=['ancestor'])
826 826 source = backend.create_repo(heads=['change'])
827 827
828 828 # create pr from a in source to A in target
829 829 pull_request = PullRequest()
830 830 pull_request.source_repo = source
831 831 # TODO: johbo: Make sure that we write the source ref this way!
832 832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 833 branch=backend.default_branch_name,
834 834 commit_id=commit_ids['change'])
835 835 pull_request.target_repo = target
836 836 # TODO: johbo: Target ref should be branch based, since tip can jump
837 837 # from branch to branch
838 838 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
839 839 branch=backend.default_branch_name,
840 840 commit_id=commit_ids['ancestor'])
841 841 pull_request.revisions = [commit_ids['change']]
842 842 pull_request.title = u"Test"
843 843 pull_request.description = u"Description"
844 844 pull_request.author = UserModel().get_by_username(
845 845 TEST_USER_ADMIN_LOGIN)
846 846 Session().add(pull_request)
847 847 Session().commit()
848 848 pull_request_id = pull_request.pull_request_id
849 849
850 850 # target has ancestor - ancestor-new
851 851 # source has ancestor - ancestor-new - change-rebased
852 852 backend.pull_heads(target, heads=['ancestor-new'])
853 853 backend.pull_heads(source, heads=['change-rebased'])
854 854
855 855 # update PR
856 856 self.app.post(
857 857 route_path('pullrequest_update',
858 858 repo_name=target.repo_name,
859 859 pull_request_id=pull_request_id),
860 860 params={'update_commits': 'true',
861 861 'csrf_token': csrf_token},
862 862 status=200)
863 863
864 864 # Expect the target reference to be updated correctly
865 865 pull_request = PullRequest.get(pull_request_id)
866 866 assert pull_request.revisions == [commit_ids['change-rebased']]
867 867 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
868 868 branch=backend.default_branch_name,
869 869 commit_id=commit_ids['ancestor-new'])
870 870 assert pull_request.target_ref == expected_target_ref
871 871
872 872 def test_remove_pull_request_branch(self, backend_git, csrf_token):
873 873 branch_name = 'development'
874 874 commits = [
875 875 {'message': 'initial-commit'},
876 876 {'message': 'old-feature'},
877 877 {'message': 'new-feature', 'branch': branch_name},
878 878 ]
879 879 repo = backend_git.create_repo(commits)
880 880 commit_ids = backend_git.commit_ids
881 881
882 882 pull_request = PullRequest()
883 883 pull_request.source_repo = repo
884 884 pull_request.target_repo = repo
885 885 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
886 886 branch=branch_name, commit_id=commit_ids['new-feature'])
887 887 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
888 888 branch=backend_git.default_branch_name,
889 889 commit_id=commit_ids['old-feature'])
890 890 pull_request.revisions = [commit_ids['new-feature']]
891 891 pull_request.title = u"Test"
892 892 pull_request.description = u"Description"
893 893 pull_request.author = UserModel().get_by_username(
894 894 TEST_USER_ADMIN_LOGIN)
895 895 Session().add(pull_request)
896 896 Session().commit()
897 897
898 898 vcs = repo.scm_instance()
899 899 vcs.remove_ref('refs/heads/{}'.format(branch_name))
900 900
901 901 response = self.app.get(route_path(
902 902 'pullrequest_show',
903 903 repo_name=repo.repo_name,
904 904 pull_request_id=pull_request.pull_request_id))
905 905
906 906 assert response.status_int == 200
907 907 assert_response = AssertResponse(response)
908 908 assert_response.element_contains(
909 909 '#changeset_compare_view_content .alert strong',
910 910 'Missing commits')
911 911 assert_response.element_contains(
912 912 '#changeset_compare_view_content .alert',
913 913 'This pull request cannot be displayed, because one or more'
914 914 ' commits no longer exist in the source repository.')
915 915
916 916 def test_strip_commits_from_pull_request(
917 917 self, backend, pr_util, csrf_token):
918 918 commits = [
919 919 {'message': 'initial-commit'},
920 920 {'message': 'old-feature'},
921 921 {'message': 'new-feature', 'parents': ['initial-commit']},
922 922 ]
923 923 pull_request = pr_util.create_pull_request(
924 924 commits, target_head='initial-commit', source_head='new-feature',
925 925 revisions=['new-feature'])
926 926
927 927 vcs = pr_util.source_repository.scm_instance()
928 928 if backend.alias == 'git':
929 929 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
930 930 else:
931 931 vcs.strip(pr_util.commit_ids['new-feature'])
932 932
933 933 response = self.app.get(route_path(
934 934 'pullrequest_show',
935 935 repo_name=pr_util.target_repository.repo_name,
936 936 pull_request_id=pull_request.pull_request_id))
937 937
938 938 assert response.status_int == 200
939 939 assert_response = AssertResponse(response)
940 940 assert_response.element_contains(
941 941 '#changeset_compare_view_content .alert strong',
942 942 'Missing commits')
943 943 assert_response.element_contains(
944 944 '#changeset_compare_view_content .alert',
945 945 'This pull request cannot be displayed, because one or more'
946 946 ' commits no longer exist in the source repository.')
947 947 assert_response.element_contains(
948 948 '#update_commits',
949 949 'Update commits')
950 950
951 951 def test_strip_commits_and_update(
952 952 self, backend, pr_util, csrf_token):
953 953 commits = [
954 954 {'message': 'initial-commit'},
955 955 {'message': 'old-feature'},
956 956 {'message': 'new-feature', 'parents': ['old-feature']},
957 957 ]
958 958 pull_request = pr_util.create_pull_request(
959 959 commits, target_head='old-feature', source_head='new-feature',
960 960 revisions=['new-feature'], mergeable=True)
961 961
962 962 vcs = pr_util.source_repository.scm_instance()
963 963 if backend.alias == 'git':
964 964 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
965 965 else:
966 966 vcs.strip(pr_util.commit_ids['new-feature'])
967 967
968 968 response = self.app.post(
969 969 route_path('pullrequest_update',
970 970 repo_name=pull_request.target_repo.repo_name,
971 971 pull_request_id=pull_request.pull_request_id),
972 972 params={'update_commits': 'true',
973 973 'csrf_token': csrf_token})
974 974
975 975 assert response.status_int == 200
976 976 assert response.body == 'true'
977 977
978 978 # Make sure that after update, it won't raise 500 errors
979 979 response = self.app.get(route_path(
980 980 'pullrequest_show',
981 981 repo_name=pr_util.target_repository.repo_name,
982 982 pull_request_id=pull_request.pull_request_id))
983 983
984 984 assert response.status_int == 200
985 985 assert_response = AssertResponse(response)
986 986 assert_response.element_contains(
987 987 '#changeset_compare_view_content .alert strong',
988 988 'Missing commits')
989 989
990 990 def test_branch_is_a_link(self, pr_util):
991 991 pull_request = pr_util.create_pull_request()
992 992 pull_request.source_ref = 'branch:origin:1234567890abcdef'
993 993 pull_request.target_ref = 'branch:target:abcdef1234567890'
994 994 Session().add(pull_request)
995 995 Session().commit()
996 996
997 997 response = self.app.get(route_path(
998 998 'pullrequest_show',
999 999 repo_name=pull_request.target_repo.scm_instance().name,
1000 1000 pull_request_id=pull_request.pull_request_id))
1001 1001 assert response.status_int == 200
1002 1002 assert_response = AssertResponse(response)
1003 1003
1004 1004 origin = assert_response.get_element('.pr-origininfo .tag')
1005 1005 origin_children = origin.getchildren()
1006 1006 assert len(origin_children) == 1
1007 1007 target = assert_response.get_element('.pr-targetinfo .tag')
1008 1008 target_children = target.getchildren()
1009 1009 assert len(target_children) == 1
1010 1010
1011 1011 expected_origin_link = route_path(
1012 1012 'repo_changelog',
1013 1013 repo_name=pull_request.source_repo.scm_instance().name,
1014 1014 params=dict(branch='origin'))
1015 1015 expected_target_link = route_path(
1016 1016 'repo_changelog',
1017 1017 repo_name=pull_request.target_repo.scm_instance().name,
1018 1018 params=dict(branch='target'))
1019 1019 assert origin_children[0].attrib['href'] == expected_origin_link
1020 1020 assert origin_children[0].text == 'branch: origin'
1021 1021 assert target_children[0].attrib['href'] == expected_target_link
1022 1022 assert target_children[0].text == 'branch: target'
1023 1023
1024 1024 def test_bookmark_is_not_a_link(self, pr_util):
1025 1025 pull_request = pr_util.create_pull_request()
1026 1026 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1027 1027 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1028 1028 Session().add(pull_request)
1029 1029 Session().commit()
1030 1030
1031 1031 response = self.app.get(route_path(
1032 1032 'pullrequest_show',
1033 1033 repo_name=pull_request.target_repo.scm_instance().name,
1034 1034 pull_request_id=pull_request.pull_request_id))
1035 1035 assert response.status_int == 200
1036 1036 assert_response = AssertResponse(response)
1037 1037
1038 1038 origin = assert_response.get_element('.pr-origininfo .tag')
1039 1039 assert origin.text.strip() == 'bookmark: origin'
1040 1040 assert origin.getchildren() == []
1041 1041
1042 1042 target = assert_response.get_element('.pr-targetinfo .tag')
1043 1043 assert target.text.strip() == 'bookmark: target'
1044 1044 assert target.getchildren() == []
1045 1045
1046 1046 def test_tag_is_not_a_link(self, pr_util):
1047 1047 pull_request = pr_util.create_pull_request()
1048 1048 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1049 1049 pull_request.target_ref = 'tag:target:abcdef1234567890'
1050 1050 Session().add(pull_request)
1051 1051 Session().commit()
1052 1052
1053 1053 response = self.app.get(route_path(
1054 1054 'pullrequest_show',
1055 1055 repo_name=pull_request.target_repo.scm_instance().name,
1056 1056 pull_request_id=pull_request.pull_request_id))
1057 1057 assert response.status_int == 200
1058 1058 assert_response = AssertResponse(response)
1059 1059
1060 1060 origin = assert_response.get_element('.pr-origininfo .tag')
1061 1061 assert origin.text.strip() == 'tag: origin'
1062 1062 assert origin.getchildren() == []
1063 1063
1064 1064 target = assert_response.get_element('.pr-targetinfo .tag')
1065 1065 assert target.text.strip() == 'tag: target'
1066 1066 assert target.getchildren() == []
1067 1067
1068 1068 @pytest.mark.parametrize('mergeable', [True, False])
1069 1069 def test_shadow_repository_link(
1070 1070 self, mergeable, pr_util, http_host_only_stub):
1071 1071 """
1072 1072 Check that the pull request summary page displays a link to the shadow
1073 1073 repository if the pull request is mergeable. If it is not mergeable
1074 1074 the link should not be displayed.
1075 1075 """
1076 1076 pull_request = pr_util.create_pull_request(
1077 1077 mergeable=mergeable, enable_notifications=False)
1078 1078 target_repo = pull_request.target_repo.scm_instance()
1079 1079 pr_id = pull_request.pull_request_id
1080 1080 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1081 1081 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1082 1082
1083 1083 response = self.app.get(route_path(
1084 1084 'pullrequest_show',
1085 1085 repo_name=target_repo.name,
1086 1086 pull_request_id=pr_id))
1087 1087
1088 1088 assertr = AssertResponse(response)
1089 1089 if mergeable:
1090 1090 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1091 1091 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1092 1092 else:
1093 1093 assertr.no_element_exists('.pr-mergeinfo')
1094 1094
1095 1095
1096 1096 @pytest.mark.usefixtures('app')
1097 1097 @pytest.mark.backends("git", "hg")
1098 1098 class TestPullrequestsControllerDelete(object):
1099 1099 def test_pull_request_delete_button_permissions_admin(
1100 1100 self, autologin_user, user_admin, pr_util):
1101 1101 pull_request = pr_util.create_pull_request(
1102 1102 author=user_admin.username, enable_notifications=False)
1103 1103
1104 1104 response = self.app.get(route_path(
1105 1105 'pullrequest_show',
1106 1106 repo_name=pull_request.target_repo.scm_instance().name,
1107 1107 pull_request_id=pull_request.pull_request_id))
1108 1108
1109 1109 response.mustcontain('id="delete_pullrequest"')
1110 1110 response.mustcontain('Confirm to delete this pull request')
1111 1111
1112 1112 def test_pull_request_delete_button_permissions_owner(
1113 1113 self, autologin_regular_user, user_regular, pr_util):
1114 1114 pull_request = pr_util.create_pull_request(
1115 1115 author=user_regular.username, enable_notifications=False)
1116 1116
1117 1117 response = self.app.get(route_path(
1118 1118 'pullrequest_show',
1119 1119 repo_name=pull_request.target_repo.scm_instance().name,
1120 1120 pull_request_id=pull_request.pull_request_id))
1121 1121
1122 1122 response.mustcontain('id="delete_pullrequest"')
1123 1123 response.mustcontain('Confirm to delete this pull request')
1124 1124
1125 1125 def test_pull_request_delete_button_permissions_forbidden(
1126 1126 self, autologin_regular_user, user_regular, user_admin, pr_util):
1127 1127 pull_request = pr_util.create_pull_request(
1128 1128 author=user_admin.username, enable_notifications=False)
1129 1129
1130 1130 response = self.app.get(route_path(
1131 1131 'pullrequest_show',
1132 1132 repo_name=pull_request.target_repo.scm_instance().name,
1133 1133 pull_request_id=pull_request.pull_request_id))
1134 1134 response.mustcontain(no=['id="delete_pullrequest"'])
1135 1135 response.mustcontain(no=['Confirm to delete this pull request'])
1136 1136
1137 1137 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1138 1138 self, autologin_regular_user, user_regular, user_admin, pr_util,
1139 1139 user_util):
1140 1140
1141 1141 pull_request = pr_util.create_pull_request(
1142 1142 author=user_admin.username, enable_notifications=False)
1143 1143
1144 1144 user_util.grant_user_permission_to_repo(
1145 1145 pull_request.target_repo, user_regular,
1146 1146 'repository.write')
1147 1147
1148 1148 response = self.app.get(route_path(
1149 1149 'pullrequest_show',
1150 1150 repo_name=pull_request.target_repo.scm_instance().name,
1151 1151 pull_request_id=pull_request.pull_request_id))
1152 1152
1153 1153 response.mustcontain('id="open_edit_pullrequest"')
1154 1154 response.mustcontain('id="delete_pullrequest"')
1155 1155 response.mustcontain(no=['Confirm to delete this pull request'])
1156 1156
1157 1157 def test_delete_comment_returns_404_if_comment_does_not_exist(
1158 1158 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1159 1159
1160 1160 pull_request = pr_util.create_pull_request(
1161 1161 author=user_admin.username, enable_notifications=False)
1162 1162
1163 1163 self.app.post(
1164 1164 route_path(
1165 1165 'pullrequest_comment_delete',
1166 1166 repo_name=pull_request.target_repo.scm_instance().name,
1167 1167 pull_request_id=pull_request.pull_request_id,
1168 1168 comment_id=1024404),
1169 1169 extra_environ=xhr_header,
1170 1170 params={'csrf_token': csrf_token},
1171 1171 status=404
1172 1172 )
1173 1173
1174 1174 def test_delete_comment(
1175 1175 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1176 1176
1177 1177 pull_request = pr_util.create_pull_request(
1178 1178 author=user_admin.username, enable_notifications=False)
1179 1179 comment = pr_util.create_comment()
1180 1180 comment_id = comment.comment_id
1181 1181
1182 1182 response = self.app.post(
1183 1183 route_path(
1184 1184 'pullrequest_comment_delete',
1185 1185 repo_name=pull_request.target_repo.scm_instance().name,
1186 1186 pull_request_id=pull_request.pull_request_id,
1187 1187 comment_id=comment_id),
1188 1188 extra_environ=xhr_header,
1189 1189 params={'csrf_token': csrf_token},
1190 1190 status=200
1191 1191 )
1192 1192 assert response.body == 'true'
1193 1193
1194 @pytest.mark.parametrize('url_type', [
1195 'pullrequest_new',
1196 'pullrequest_create',
1197 'pullrequest_update',
1198 'pullrequest_merge',
1199 ])
1200 def test_pull_request_is_forbidden_on_archived_repo(
1201 self, autologin_user, backend, xhr_header, user_util, url_type):
1202
1203 # create a temporary repo
1204 source = user_util.create_repo(repo_type=backend.alias)
1205 repo_name = source.repo_name
1206 repo = Repository.get_by_repo_name(repo_name)
1207 repo.archived = True
1208 Session().commit()
1209
1210 response = self.app.get(
1211 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1212
1213 msg = 'Action not supported for archived repository.'
1214 assert_session_flash(response, msg)
1215
1194 1216
1195 1217 def assert_pull_request_status(pull_request, expected_status):
1196 1218 status = ChangesetStatusModel().calculated_review_status(
1197 1219 pull_request=pull_request)
1198 1220 assert status == expected_status
1199 1221
1200 1222
1201 1223 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1202 1224 @pytest.mark.usefixtures("autologin_user")
1203 1225 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1204 1226 response = app.get(
1205 1227 route_path(route, repo_name=backend_svn.repo_name), status=404)
1206 1228
@@ -1,150 +1,173 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.utils2 import safe_unicode, safe_str
24 24 from rhodecode.model.db import Repository
25 25 from rhodecode.model.repo import RepoModel
26 26 from rhodecode.tests import (
27 27 HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator)
28 28 from rhodecode.tests.fixture import Fixture
29 29 from rhodecode.tests.utils import repo_on_filesystem
30 30
31 31 fixture = Fixture()
32 32
33 33
34 34 def route_path(name, params=None, **kwargs):
35 35 import urllib
36 36
37 37 base_url = {
38 38 'repo_summary_explicit': '/{repo_name}/summary',
39 39 'repo_summary': '/{repo_name}',
40 40 'edit_repo_advanced': '/{repo_name}/settings/advanced',
41 41 'edit_repo_advanced_delete': '/{repo_name}/settings/advanced/delete',
42 'edit_repo_advanced_archive': '/{repo_name}/settings/advanced/archive',
42 43 'edit_repo_advanced_fork': '/{repo_name}/settings/advanced/fork',
43 44 'edit_repo_advanced_locking': '/{repo_name}/settings/advanced/locking',
44 45 'edit_repo_advanced_journal': '/{repo_name}/settings/advanced/journal',
45 46
46 47 }[name].format(**kwargs)
47 48
48 49 if params:
49 50 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
50 51 return base_url
51 52
52 53
53 54 @pytest.mark.usefixtures('autologin_user', 'app')
54 55 class TestAdminRepoSettingsAdvanced(object):
55 56
56 57 def test_set_repo_fork_has_no_self_id(self, autologin_user, backend):
57 58 repo = backend.repo
58 59 response = self.app.get(
59 60 route_path('edit_repo_advanced', repo_name=backend.repo_name))
60 61 opt = """<option value="%s">vcs_test_git</option>""" % repo.repo_id
61 62 response.mustcontain(no=[opt])
62 63
63 64 def test_set_fork_of_target_repo(
64 65 self, autologin_user, backend, csrf_token):
65 66 target_repo = 'target_%s' % backend.alias
66 67 fixture.create_repo(target_repo, repo_type=backend.alias)
67 68 repo2 = Repository.get_by_repo_name(target_repo)
68 69 response = self.app.post(
69 70 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
70 71 params={'id_fork_of': repo2.repo_id,
71 72 'csrf_token': csrf_token})
72 73 repo = Repository.get_by_repo_name(backend.repo_name)
73 74 repo2 = Repository.get_by_repo_name(target_repo)
74 75 assert_session_flash(
75 76 response,
76 77 'Marked repo %s as fork of %s' % (repo.repo_name, repo2.repo_name))
77 78
78 79 assert repo.fork == repo2
79 80 response = response.follow()
80 81 # check if given repo is selected
81 82
82 83 opt = 'This repository is a fork of <a href="%s">%s</a>' % (
83 84 route_path('repo_summary', repo_name=repo2.repo_name),
84 85 repo2.repo_name)
85 86
86 87 response.mustcontain(opt)
87 88
88 89 fixture.destroy_repo(target_repo, forks='detach')
89 90
90 91 @pytest.mark.backends("hg", "git")
91 92 def test_set_fork_of_other_type_repo(
92 93 self, autologin_user, backend, csrf_token):
93 94 TARGET_REPO_MAP = {
94 95 'git': {
95 96 'type': 'hg',
96 97 'repo_name': HG_REPO},
97 98 'hg': {
98 99 'type': 'git',
99 100 'repo_name': GIT_REPO},
100 101 }
101 102 target_repo = TARGET_REPO_MAP[backend.alias]
102 103
103 104 repo2 = Repository.get_by_repo_name(target_repo['repo_name'])
104 105 response = self.app.post(
105 106 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
106 107 params={'id_fork_of': repo2.repo_id,
107 108 'csrf_token': csrf_token})
108 109 assert_session_flash(
109 110 response,
110 111 'Cannot set repository as fork of repository with other type')
111 112
112 113 def test_set_fork_of_none(self, autologin_user, backend, csrf_token):
113 114 # mark it as None
114 115 response = self.app.post(
115 116 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
116 117 params={'id_fork_of': None,
117 118 'csrf_token': csrf_token})
118 119 assert_session_flash(
119 120 response,
120 121 'Marked repo %s as fork of %s'
121 122 % (backend.repo_name, "Nothing"))
122 123 assert backend.repo.fork is None
123 124
124 125 def test_set_fork_of_same_repo(self, autologin_user, backend, csrf_token):
125 126 repo = Repository.get_by_repo_name(backend.repo_name)
126 127 response = self.app.post(
127 128 route_path('edit_repo_advanced_fork', repo_name=backend.repo_name),
128 129 params={'id_fork_of': repo.repo_id, 'csrf_token': csrf_token})
129 130 assert_session_flash(
130 131 response, 'An error occurred during this operation')
131 132
132 133 @pytest.mark.parametrize(
133 134 "suffix",
134 135 ['', u'ąęł' , '123'],
135 136 ids=no_newline_id_generator)
136 def test_advanced_delete(self, autologin_user, backend, suffix, csrf_token):
137 def test_advanced_repo_delete(self, autologin_user, backend, suffix, csrf_token):
137 138 repo = backend.create_repo(name_suffix=suffix)
138 139 repo_name = repo.repo_name
139 140 repo_name_str = safe_str(repo.repo_name)
140 141
141 142 response = self.app.post(
142 143 route_path('edit_repo_advanced_delete', repo_name=repo_name_str),
143 144 params={'csrf_token': csrf_token})
144 145 assert_session_flash(response,
145 146 u'Deleted repository `{}`'.format(repo_name))
146 147 response.follow()
147 148
148 149 # check if repo was deleted from db
149 150 assert RepoModel().get_by_repo_name(repo_name) is None
150 151 assert not repo_on_filesystem(repo_name_str)
152
153 @pytest.mark.parametrize(
154 "suffix",
155 ['', u'ąęł' , '123'],
156 ids=no_newline_id_generator)
157 def test_advanced_repo_archive(self, autologin_user, backend, suffix, csrf_token):
158 repo = backend.create_repo(name_suffix=suffix)
159 repo_name = repo.repo_name
160 repo_name_str = safe_str(repo.repo_name)
161
162 response = self.app.post(
163 route_path('edit_repo_advanced_archive', repo_name=repo_name_str),
164 params={'csrf_token': csrf_token})
165
166 assert_session_flash(response,
167 u'Archived repository `{}`'.format(repo_name))
168
169 response = self.app.get(route_path('repo_summary', repo_name=repo_name_str))
170 response.mustcontain('This repository has been archived. It is now read-only.')
171
172 # check if repo was marked as archived in the db
173 assert RepoModel().get_by_repo_name(repo_name).archived is True
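
The same archive action can also be driven over HTTP against the route exercised above; a minimal sketch, assuming a logged-in session and an already-obtained csrf token (host, repo name and token handling are placeholders, only the `/settings/advanced/archive` route and the `csrf_token` field come from this changeset)::

    import requests

    def archive_repo(base_url, repo_name, csrf_token, session=None):
        # csrf_token must be read from a form rendered for the logged-in
        # session; obtaining it is outside the scope of this sketch
        session = session or requests.Session()
        url = '{}/{}/settings/advanced/archive'.format(base_url, repo_name)
        return session.post(url, data={'csrf_token': csrf_token})
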
@@ -1,263 +1,314 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24 from pyramid.httpexceptions import HTTPFound
25 25
26 from rhodecode import events
26 27 from rhodecode.apps._base import RepoAppView
27 28 from rhodecode.lib import helpers as h
28 29 from rhodecode.lib import audit_logger
29 30 from rhodecode.lib.auth import (
30 31 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
31 32 HasRepoPermissionAny)
32 33 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
33 34 from rhodecode.lib.utils2 import safe_int
34 35 from rhodecode.lib.vcs import RepositoryError
35 36 from rhodecode.model.db import Session, UserFollowing, User, Repository
36 37 from rhodecode.model.repo import RepoModel
37 38 from rhodecode.model.scm import ScmModel
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
42 43 class RepoSettingsView(RepoAppView):
43 44
44 45 def load_default_context(self):
45 46 c = self._get_local_tmpl_context()
46 47 return c
47 48
49 def _get_users_with_permissions(self):
50 user_permissions = {}
51 for perm in self.db_repo.permissions():
52 user_permissions[perm.user_id] = perm
53
54 return user_permissions
55
48 56 @LoginRequired()
49 57 @HasRepoPermissionAnyDecorator('repository.admin')
50 58 @view_config(
51 59 route_name='edit_repo_advanced', request_method='GET',
52 60 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
53 61 def edit_advanced(self):
54 62 c = self.load_default_context()
55 63 c.active = 'advanced'
56 64
57 65 c.default_user_id = User.get_default_user().user_id
58 66 c.in_public_journal = UserFollowing.query() \
59 67 .filter(UserFollowing.user_id == c.default_user_id) \
60 68 .filter(UserFollowing.follows_repository == self.db_repo).scalar()
61 69
62 70 c.has_origin_repo_read_perm = False
63 71 if self.db_repo.fork:
64 72 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
65 73 'repository.write', 'repository.read', 'repository.admin')(
66 74 self.db_repo.fork.repo_name, 'repo set as fork page')
67 75
68 76 return self._get_template_context(c)
69 77
70 78 @LoginRequired()
71 79 @HasRepoPermissionAnyDecorator('repository.admin')
72 80 @CSRFRequired()
73 81 @view_config(
82 route_name='edit_repo_advanced_archive', request_method='POST',
83 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
84 def edit_advanced_archive(self):
85 """
86 Archives the repository. It becomes read-only and is hidden from search
87 and other queries, but remains visible to super-admins.
88 """
89
90 _ = self.request.translate
91
92 try:
93 old_data = self.db_repo.get_api_data()
94 RepoModel().archive(self.db_repo)
95
96 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
97 audit_logger.store_web(
98 'repo.archive', action_data={'old_data': old_data},
99 user=self._rhodecode_user, repo=repo)
100
101 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
102 h.flash(
103 _('Archived repository `%s`') % self.db_repo_name,
104 category='success')
105 Session().commit()
106 except Exception:
107 log.exception("Exception during archiving of repository")
108 h.flash(_('An error occurred during archiving of `%s`')
109 % self.db_repo_name, category='error')
110 # redirect back to advanced settings for more options
111 raise HTTPFound(
112 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
113 _anchor='advanced-archive'))
114
115 # flush cached permissions for all users that have permissions defined on this repo
116 affected_user_ids = self._get_users_with_permissions().keys()
117 events.trigger(events.UserPermissionsChange(affected_user_ids))
118
119 raise HTTPFound(h.route_path('home'))
120
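
RepoModel().archive() itself is defined outside this hunk; a minimal sketch of what such a method might look like, assuming it simply flips the `archived` flag added by the new db migration (the helper names `_get_repo` and `self.sa` follow the usual RepoModel conventions, but this body is an assumption, not the shipped implementation)::

    def archive(self, repo):
        repo = self._get_repo(repo)  # accept a Repository object, name or id
        try:
            repo.archived = True     # column introduced by db migration 91
            self.sa.add(repo)
            # the caller (the view above) issues Session().commit()
        except Exception:
            log.exception('Failed to archive repository %s', repo)
            raise
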
121 @LoginRequired()
122 @HasRepoPermissionAnyDecorator('repository.admin')
123 @CSRFRequired()
124 @view_config(
74 125 route_name='edit_repo_advanced_delete', request_method='POST',
75 126 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
76 127 def edit_advanced_delete(self):
77 128 """
78 129 Deletes the repository, or shows warnings if deletion is not possible
79 130 because of attached forks or other errors.
80 131 """
81 132 _ = self.request.translate
82 133 handle_forks = self.request.POST.get('forks', None)
83 134 if handle_forks == 'detach_forks':
84 135 handle_forks = 'detach'
85 136 elif handle_forks == 'delete_forks':
86 137 handle_forks = 'delete'
87 138
88 139 try:
89 140 old_data = self.db_repo.get_api_data()
90 141 RepoModel().delete(self.db_repo, forks=handle_forks)
91 142
92 143 _forks = self.db_repo.forks.count()
93 144 if _forks and handle_forks:
94 145 if handle_forks == 'detach':
95 146 h.flash(_('Detached %s forks') % _forks, category='success')
96 147 elif handle_forks == 'delete':
97 148 h.flash(_('Deleted %s forks') % _forks, category='success')
98 149
99 150 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
100 151 audit_logger.store_web(
101 152 'repo.delete', action_data={'old_data': old_data},
102 153 user=self._rhodecode_user, repo=repo)
103 154
104 155 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
105 156 h.flash(
106 157 _('Deleted repository `%s`') % self.db_repo_name,
107 158 category='success')
108 159 Session().commit()
109 160 except AttachedForksError:
110 161 repo_advanced_url = h.route_path(
111 162 'edit_repo_advanced', repo_name=self.db_repo_name,
112 163 _anchor='advanced-delete')
113 164 delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
114 165 h.flash(_('Cannot delete `{repo}`, it still contains attached forks. '
115 166 'Try using the {delete_or_detach} option.')
116 167 .format(repo=self.db_repo_name, delete_or_detach=delete_anchor),
117 168 category='warning')
118 169
119 170 # redirect to advanced settings to handle the forks action?
120 171 raise HTTPFound(repo_advanced_url)
121 172
122 173 except AttachedPullRequestsError:
123 174 repo_advanced_url = h.route_path(
124 175 'edit_repo_advanced', repo_name=self.db_repo_name,
125 176 _anchor='advanced-delete')
126 177 attached_prs = len(self.db_repo.pull_requests_source +
127 178 self.db_repo.pull_requests_target)
128 179 h.flash(
129 180 _('Cannot delete `{repo}`, it still contains {num} attached pull requests. '
130 181 'Consider archiving the repository instead.').format(
131 182 repo=self.db_repo_name, num=attached_prs), category='warning')
132 183
133 184 # redirect to advanced settings to handle the forks action?
134 185 raise HTTPFound(repo_advanced_url)
135 186
136 187 except Exception:
137 188 log.exception("Exception during deletion of repository")
138 189 h.flash(_('An error occurred during deletion of `%s`')
139 190 % self.db_repo_name, category='error')
140 191 # redirect to advanced for more deletion options
141 192 raise HTTPFound(
142 193 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
143 194 _anchor='advanced-delete'))
144 195
145 196 raise HTTPFound(h.route_path('home'))
146 197
147 198 @LoginRequired()
148 199 @HasRepoPermissionAnyDecorator('repository.admin')
149 200 @CSRFRequired()
150 201 @view_config(
151 202 route_name='edit_repo_advanced_journal', request_method='POST',
152 203 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
153 204 def edit_advanced_journal(self):
154 205 """
155 206 Sets this repository to be visible in the public journal,
156 207 in other words, makes the default user follow this repo
157 208 """
158 209 _ = self.request.translate
159 210
160 211 try:
161 212 user_id = User.get_default_user().user_id
162 213 ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id)
163 214 h.flash(_('Updated repository visibility in public journal'),
164 215 category='success')
165 216 Session().commit()
166 217 except Exception:
167 218 h.flash(_('An error occurred during setting this '
168 219 'repository in public journal'),
169 220 category='error')
170 221
171 222 raise HTTPFound(
172 223 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
173 224
174 225 @LoginRequired()
175 226 @HasRepoPermissionAnyDecorator('repository.admin')
176 227 @CSRFRequired()
177 228 @view_config(
178 229 route_name='edit_repo_advanced_fork', request_method='POST',
179 230 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
180 231 def edit_advanced_fork(self):
181 232 """
182 233 Mark given repository as a fork of another
183 234 """
184 235 _ = self.request.translate
185 236
186 237 new_fork_id = safe_int(self.request.POST.get('id_fork_of'))
187 238
188 239 # valid repo, re-check permissions
189 240 if new_fork_id:
190 241 repo = Repository.get(new_fork_id)
191 242 # ensure we have at least read access to the repo we mark
192 243 perm_check = HasRepoPermissionAny(
193 244 'repository.read', 'repository.write', 'repository.admin')
194 245
195 246 if repo and perm_check(repo_name=repo.repo_name):
196 247 new_fork_id = repo.repo_id
197 248 else:
198 249 new_fork_id = None
199 250
200 251 try:
201 252 repo = ScmModel().mark_as_fork(
202 253 self.db_repo_name, new_fork_id, self._rhodecode_user.user_id)
203 254 fork = repo.fork.repo_name if repo.fork else _('Nothing')
204 255 Session().commit()
205 256 h.flash(
206 257 _('Marked repo %s as fork of %s') % (self.db_repo_name, fork),
207 258 category='success')
208 259 except RepositoryError as e:
209 260 log.exception("Repository Error occurred")
210 261 h.flash(str(e), category='error')
211 262 except Exception:
212 263 log.exception("Exception while editing fork")
213 264 h.flash(_('An error occurred during this operation'),
214 265 category='error')
215 266
216 267 raise HTTPFound(
217 268 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
218 269
219 270 @LoginRequired()
220 271 @HasRepoPermissionAnyDecorator('repository.admin')
221 272 @CSRFRequired()
222 273 @view_config(
223 274 route_name='edit_repo_advanced_locking', request_method='POST',
224 275 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
225 276 def edit_advanced_locking(self):
226 277 """
227 278 Toggle locking of repository
228 279 """
229 280 _ = self.request.translate
230 281 set_lock = self.request.POST.get('set_lock')
231 282 set_unlock = self.request.POST.get('set_unlock')
232 283
233 284 try:
234 285 if set_lock:
235 286 Repository.lock(self.db_repo, self._rhodecode_user.user_id,
236 287 lock_reason=Repository.LOCK_WEB)
237 288 h.flash(_('Locked repository'), category='success')
238 289 elif set_unlock:
239 290 Repository.unlock(self.db_repo)
240 291 h.flash(_('Unlocked repository'), category='success')
241 292 except Exception as e:
242 293 log.exception("Exception during unlocking")
243 294 h.flash(_('An error occurred during unlocking'), category='error')
244 295
245 296 raise HTTPFound(
246 297 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
247 298
248 299 @LoginRequired()
249 300 @HasRepoPermissionAnyDecorator('repository.admin')
250 301 @view_config(
251 302 route_name='edit_repo_advanced_hooks', request_method='GET',
252 303 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
253 304 def edit_advanced_install_hooks(self):
254 305 """
255 306 Install Hooks for repository
256 307 """
257 308 _ = self.request.translate
258 309 self.load_default_context()
259 310 self.rhodecode_vcs_repo.install_hooks(force=True)
261 312 h.flash(_('Installed updated hooks into this repository'),
261 312 category='success')
262 313 raise HTTPFound(
263 314 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
@@ -1,287 +1,288 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23
24 24 from rhodecode.lib.jsonalchemy import JsonRaw
25 25 from rhodecode.model import meta
26 26 from rhodecode.model.db import User, UserLog, Repository
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31 # action as key, and expected action_data as value
32 32 ACTIONS_V1 = {
33 33 'user.login.success': {'user_agent': ''},
34 34 'user.login.failure': {'user_agent': ''},
35 35 'user.logout': {'user_agent': ''},
36 36 'user.register': {},
37 37 'user.password.reset_request': {},
38 38 'user.push': {'user_agent': '', 'commit_ids': []},
39 39 'user.pull': {'user_agent': ''},
40 40
41 41 'user.create': {'data': {}},
42 42 'user.delete': {'old_data': {}},
43 43 'user.edit': {'old_data': {}},
44 44 'user.edit.permissions': {},
45 45 'user.edit.ip.add': {'ip': {}, 'user': {}},
46 46 'user.edit.ip.delete': {'ip': {}, 'user': {}},
47 47 'user.edit.token.add': {'token': {}, 'user': {}},
48 48 'user.edit.token.delete': {'token': {}, 'user': {}},
49 49 'user.edit.email.add': {'email': ''},
50 50 'user.edit.email.delete': {'email': ''},
51 51 'user.edit.ssh_key.add': {'token': {}, 'user': {}},
52 52 'user.edit.ssh_key.delete': {'token': {}, 'user': {}},
53 53 'user.edit.password_reset.enabled': {},
54 54 'user.edit.password_reset.disabled': {},
55 55
56 56 'user_group.create': {'data': {}},
57 57 'user_group.delete': {'old_data': {}},
58 58 'user_group.edit': {'old_data': {}},
59 59 'user_group.edit.permissions': {},
60 60 'user_group.edit.member.add': {'user': {}},
61 61 'user_group.edit.member.delete': {'user': {}},
62 62
63 63 'repo.create': {'data': {}},
64 64 'repo.fork': {'data': {}},
65 65 'repo.edit': {'old_data': {}},
66 66 'repo.edit.permissions': {},
67 67 'repo.edit.permissions.branch': {},
68 'repo.archive': {'old_data': {}},
68 69 'repo.delete': {'old_data': {}},
69 70
70 71 'repo.archive.download': {'user_agent': '', 'archive_name': '',
71 72 'archive_spec': '', 'archive_cached': ''},
72 73
73 74 'repo.permissions.branch_rule.create': {},
74 75 'repo.permissions.branch_rule.edit': {},
75 76 'repo.permissions.branch_rule.delete': {},
76 77
77 78 'repo.pull_request.create': '',
78 79 'repo.pull_request.edit': '',
79 80 'repo.pull_request.delete': '',
80 81 'repo.pull_request.close': '',
81 82 'repo.pull_request.merge': '',
82 83 'repo.pull_request.vote': '',
83 84 'repo.pull_request.comment.create': '',
84 85 'repo.pull_request.comment.delete': '',
85 86
86 87 'repo.pull_request.reviewer.add': '',
87 88 'repo.pull_request.reviewer.delete': '',
88 89
89 90 'repo.commit.strip': {'commit_id': ''},
90 91 'repo.commit.comment.create': {'data': {}},
91 92 'repo.commit.comment.delete': {'data': {}},
92 93 'repo.commit.vote': '',
93 94
94 95 'repo_group.create': {'data': {}},
95 96 'repo_group.edit': {'old_data': {}},
96 97 'repo_group.edit.permissions': {},
97 98 'repo_group.delete': {'old_data': {}},
98 99 }
99 100
100 101 ACTIONS = ACTIONS_V1
101 102
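
The new 'repo.archive' action uses the same `{'old_data': {}}` shape as 'repo.delete'; a small usage sketch mirroring the call made from the archive view (the user and repo values here are placeholders)::

    from rhodecode.lib import audit_logger

    # old_data would normally be repo.get_api_data() captured before archiving
    audit_logger.store_web(
        'repo.archive', action_data={'old_data': {}},
        user=audit_logger.UserWrap(username='some-admin', ip_addr='127.0.0.1'),
        repo=audit_logger.RepoWrap(repo_name='some-repo'))
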
102 103 SOURCE_WEB = 'source_web'
103 104 SOURCE_API = 'source_api'
104 105
105 106
106 107 class UserWrap(object):
107 108 """
108 109 Fake object used to imitate AuthUser
109 110 """
110 111
111 112 def __init__(self, user_id=None, username=None, ip_addr=None):
112 113 self.user_id = user_id
113 114 self.username = username
114 115 self.ip_addr = ip_addr
115 116
116 117
117 118 class RepoWrap(object):
118 119 """
119 120 Fake object used to imitate a repository object, as required by the audit logger
120 121 """
121 122
122 123 def __init__(self, repo_id=None, repo_name=None):
123 124 self.repo_id = repo_id
124 125 self.repo_name = repo_name
125 126
126 127
127 128 def _store_log(action_name, action_data, user_id, username, user_data,
128 129 ip_address, repository_id, repository_name):
129 130 user_log = UserLog()
130 131 user_log.version = UserLog.VERSION_2
131 132
132 133 user_log.action = action_name
133 134 user_log.action_data = action_data or JsonRaw(u'{}')
134 135
135 136 user_log.user_ip = ip_address
136 137
137 138 user_log.user_id = user_id
138 139 user_log.username = username
139 140 user_log.user_data = user_data or JsonRaw(u'{}')
140 141
141 142 user_log.repository_id = repository_id
142 143 user_log.repository_name = repository_name
143 144
144 145 user_log.action_date = datetime.datetime.now()
145 146
146 147 return user_log
147 148
148 149
149 150 def store_web(*args, **kwargs):
150 151 if 'action_data' not in kwargs:
151 152 kwargs['action_data'] = {}
152 153 kwargs['action_data'].update({
153 154 'source': SOURCE_WEB
154 155 })
155 156 return store(*args, **kwargs)
156 157
157 158
158 159 def store_api(*args, **kwargs):
159 160 if 'action_data' not in kwargs:
160 161 kwargs['action_data'] = {}
161 162 kwargs['action_data'].update({
162 163 'source': SOURCE_API
163 164 })
164 165 return store(*args, **kwargs)
165 166
166 167
167 168 def store(action, user, action_data=None, user_data=None, ip_addr=None,
168 169 repo=None, sa_session=None, commit=False):
169 170 """
170 171 Audit logger for various actions made by users; typically this
171 172 results in a call such as::
172 173
173 174 from rhodecode.lib import audit_logger
174 175
175 176 audit_logger.store(
176 177 'repo.edit', user=self._rhodecode_user)
177 178 audit_logger.store(
178 179 'repo.delete', action_data={'data': repo_data},
179 180 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'))
180 181
181 182 # repo action
182 183 audit_logger.store(
183 184 'repo.delete',
184 185 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'),
185 186 repo=audit_logger.RepoWrap(repo_name='some-repo'))
186 187
187 188 # repo action, when we know and have the repository object already
188 189 audit_logger.store(
189 190 'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, },
190 191 user=self._rhodecode_user,
191 192 repo=repo_object)
192 193
193 194 # alternative wrapper to the above
194 195 audit_logger.store_web(
195 196 'repo.delete', action_data={},
196 197 user=self._rhodecode_user,
197 198 repo=repo_object)
198 199
199 200 # without a user?
200 201 audit_logger.store(
201 202 'user.login.failure',
202 203 user=audit_logger.UserWrap(
203 204 username=self.request.params.get('username'),
204 205 ip_addr=self.request.remote_addr))
205 206
206 207 """
207 208 from rhodecode.lib.utils2 import safe_unicode
208 209 from rhodecode.lib.auth import AuthUser
209 210
210 211 action_spec = ACTIONS.get(action, None)
211 212 if action_spec is None:
212 213 raise ValueError('Action `{}` is not supported'.format(action))
213 214
214 215 if not sa_session:
215 216 sa_session = meta.Session()
216 217
217 218 try:
218 219 username = getattr(user, 'username', None)
219 220 if not username:
220 221 pass
221 222
222 223 user_id = getattr(user, 'user_id', None)
223 224 if not user_id:
224 225 # maybe we have a username? Try to figure out the user_id from it
225 226 if username:
226 227 user_id = getattr(
227 228 User.get_by_username(username), 'user_id', None)
228 229
229 230 ip_addr = ip_addr or getattr(user, 'ip_addr', None)
230 231 if not ip_addr:
231 232 pass
232 233
233 234 if not user_data:
234 235 # try to get this from the auth user
235 236 if isinstance(user, AuthUser):
236 237 user_data = {
237 238 'username': user.username,
238 239 'email': user.email,
239 240 }
240 241
241 242 repository_name = getattr(repo, 'repo_name', None)
242 243 repository_id = getattr(repo, 'repo_id', None)
243 244 if not repository_id:
244 245 # maybe we have a repo_name? Try to figure out the repo_id from it
245 246 if repository_name:
246 247 repository_id = getattr(
247 248 Repository.get_by_repo_name(repository_name), 'repo_id', None)
248 249
249 250 action_name = safe_unicode(action)
250 251 ip_address = safe_unicode(ip_addr)
251 252
252 253 with sa_session.no_autoflush:
253 254 update_user_last_activity(sa_session, user_id)
254 255
255 256 user_log = _store_log(
256 257 action_name=action_name,
257 258 action_data=action_data or {},
258 259 user_id=user_id,
259 260 username=username,
260 261 user_data=user_data or {},
261 262 ip_address=ip_address,
262 263 repository_id=repository_id,
263 264 repository_name=repository_name
264 265 )
265 266
266 267 sa_session.add(user_log)
267 268
268 269 if commit:
269 270 sa_session.commit()
270 271
271 272 entry_id = user_log.entry_id or ''
272 273 log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s',
273 274 entry_id, action_name, user_id, username, ip_address)
274 275
275 276 except Exception:
276 277 log.exception('AUDIT: failed to store audit log')
277 278
278 279
279 280 def update_user_last_activity(sa_session, user_id):
280 281 _last_activity = datetime.datetime.now()
281 282 try:
282 283 sa_session.query(User).filter(User.user_id == user_id).update(
283 284 {"last_activity": _last_activity})
284 285 log.debug(
285 286 'updated user `%s` last activity to:%s', user_id, _last_activity)
286 287 except Exception:
287 288 log.exception("Failed last activity update")
@@ -1,2338 +1,2355 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import time
27 27 import inspect
28 28 import collections
29 29 import fnmatch
30 30 import hashlib
31 31 import itertools
32 32 import logging
33 33 import random
34 34 import traceback
35 35 from functools import wraps
36 36
37 37 import ipaddress
38 38
39 39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 40 from sqlalchemy.orm.exc import ObjectDeletedError
41 41 from sqlalchemy.orm import joinedload
42 42 from zope.cachedescriptors.property import Lazy as LazyProperty
43 43
44 44 import rhodecode
45 45 from rhodecode.model import meta
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.user import UserModel
48 48 from rhodecode.model.db import (
49 49 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 50 UserIpMap, UserApiKeys, RepoGroup, UserGroup)
51 51 from rhodecode.lib import rc_cache
52 52 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5, safe_int, sha1
53 53 from rhodecode.lib.utils import (
54 54 get_repo_slug, get_repo_group_slug, get_user_group_slug)
55 55 from rhodecode.lib.caching_query import FromCache
56 56
57 57
58 58 if rhodecode.is_unix:
59 59 import bcrypt
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63 csrf_token_key = "csrf_token"
64 64
65 65
66 66 class PasswordGenerator(object):
67 67 """
68 68 This is a simple class for generating passwords from different sets of
69 69 characters.
70 70 usage::
71 71
72 72 passwd_gen = PasswordGenerator()
74 74 # print an 8-letter password containing only big and small
75 75 # letters of the alphabet
75 75 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
76 76 """
77 77 ALPHABETS_NUM = r'''1234567890'''
78 78 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
79 79 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
80 80 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
81 81 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
82 82 + ALPHABETS_NUM + ALPHABETS_SPECIAL
83 83 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
84 84 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
85 85 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
86 86 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
87 87
88 88 def __init__(self, passwd=''):
89 89 self.passwd = passwd
90 90
91 91 def gen_password(self, length, type_=None):
92 92 if type_ is None:
93 93 type_ = self.ALPHABETS_FULL
94 94 self.passwd = ''.join([random.choice(type_) for _ in range(length)])
95 95 return self.passwd
96 96
97 97
98 98 class _RhodeCodeCryptoBase(object):
99 99 ENC_PREF = None
100 100
101 101 def hash_create(self, str_):
102 102 """
103 103 hash the given string
104 104
105 105 :param str_: password to hash
106 106 """
107 107 raise NotImplementedError
108 108
109 109 def hash_check_with_upgrade(self, password, hashed):
110 110 """
111 111 Returns a tuple in which the first element is a boolean stating whether
112 112 the given password matches its hashed version, and the second is a new
113 113 hash of the password, in case the password should be migrated to a new
114 114 cipher.
115 115 """
116 116 checked_hash = self.hash_check(password, hashed)
117 117 return checked_hash, None
118 118
119 119 def hash_check(self, password, hashed):
120 120 """
121 121 Checks a matching password against its hashed value.
122 122
123 123 :param password: password
124 124 :param hashed: password in hashed form
125 125 """
126 126 raise NotImplementedError
127 127
128 128 def _assert_bytes(self, value):
129 129 """
130 130 Passing in a `unicode` object can lead to hard-to-detect issues
131 131 if passwords contain non-ASCII characters. Do a type check at
132 132 runtime, so that such mistakes are detected early on.
133 133 """
134 134 if not isinstance(value, str):
135 135 raise TypeError(
136 136 "Bytestring required as input, got %r." % (value, ))
137 137
138 138
139 139 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
140 140 ENC_PREF = ('$2a$10', '$2b$10')
141 141
142 142 def hash_create(self, str_):
143 143 self._assert_bytes(str_)
144 144 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
145 145
146 146 def hash_check_with_upgrade(self, password, hashed):
147 147 """
148 148 Returns a tuple in which the first element is a boolean stating whether
149 149 the given password matches its hashed version, and the second is a new
150 150 hash of the password, in case the password should be migrated to a new
151 151 cipher.
152 152
153 153 This implements special upgrade logic, which works like this:
154 154 - check if the given password matches the bcrypt hash; if yes, the
155 155 password is correct and was already stored as bcrypt. Proceed
156 156 without any changes
157 157 - if the bcrypt check fails, try sha256. If that comparison is ok,
158 158 we are using a correct but old hashed password. Indicate the
159 159 hash change and proceed
160 160 """
161 161
162 162 new_hash = None
163 163
164 164 # regular pw check
165 165 password_match_bcrypt = self.hash_check(password, hashed)
166 166
167 167 # now we want to know if the password was maybe from sha256
168 168 # basically calling _RhodeCodeCryptoSha256().hash_check()
169 169 if not password_match_bcrypt:
170 170 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
171 171 new_hash = self.hash_create(password) # make new bcrypt hash
172 172 password_match_bcrypt = True
173 173
174 174 return password_match_bcrypt, new_hash
175 175
176 176 def hash_check(self, password, hashed):
177 177 """
178 178 Checks matching password with it's hashed value.
179 179
180 180 :param password: password
181 181 :param hashed: password in hashed form
182 182 """
183 183 self._assert_bytes(password)
184 184 try:
185 185 return bcrypt.hashpw(password, hashed) == hashed
186 186 except ValueError as e:
187 187 # we probably have an invalid salt here; we should not crash,
188 188 # just return False as it would be a wrong password.
189 189 log.debug('Failed to check password hash using bcrypt %s',
190 190 safe_str(e))
191 191
192 192 return False
193 193
194 194
195 195 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
196 196 ENC_PREF = '_'
197 197
198 198 def hash_create(self, str_):
199 199 self._assert_bytes(str_)
200 200 return hashlib.sha256(str_).hexdigest()
201 201
202 202 def hash_check(self, password, hashed):
203 203 """
204 204 Checks a matching password against its hashed value.
205 205
206 206 :param password: password
207 207 :param hashed: password in hashed form
208 208 """
209 209 self._assert_bytes(password)
210 210 return hashlib.sha256(password).hexdigest() == hashed
211 211
212 212
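
The bcrypt upgrade path above can be exercised directly; an illustrative snippet, assuming a unix install where `bcrypt` is importable and run from within this module's namespace (the password value is made up)::

    # an old-style sha256 hash for an existing password
    old_hash = _RhodeCodeCryptoSha256().hash_create('s3cret')

    # checking it through the bcrypt backend both validates the password and
    # returns a fresh bcrypt hash to store in place of the old one
    valid, new_hash = _RhodeCodeCryptoBCrypt().hash_check_with_upgrade(
        's3cret', old_hash)
    assert valid is True
    assert new_hash is not None and new_hash.startswith('$2')
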
213 213 class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
214 214 ENC_PREF = '_'
215 215
216 216 def hash_create(self, str_):
217 217 self._assert_bytes(str_)
218 218 return sha1(str_)
219 219
220 220 def hash_check(self, password, hashed):
221 221 """
222 222 Checks a matching password against its hashed value.
223 223
224 224 :param password: password
225 225 :param hashed: password in hashed form
226 226 """
227 227 self._assert_bytes(password)
228 228 return sha1(password) == hashed
229 229
230 230
231 231 def crypto_backend():
232 232 """
233 233 Return the matching crypto backend.
234 234
235 235 Selection is based on whether we are running tests: we pick the sha1-test backend
236 236 to run tests faster, since bcrypt is expensive to compute
237 237 """
238 238 if rhodecode.is_test:
239 239 RhodeCodeCrypto = _RhodeCodeCryptoTest()
240 240 else:
241 241 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 242
243 243 return RhodeCodeCrypto
244 244
245 245
246 246 def get_crypt_password(password):
247 247 """
248 248 Create the hash of `password` with the active crypto backend.
249 249
250 250 :param password: The cleartext password.
251 251 :type password: unicode
252 252 """
253 253 password = safe_str(password)
254 254 return crypto_backend().hash_create(password)
255 255
256 256
257 257 def check_password(password, hashed):
258 258 """
259 259 Check if the value in `password` matches the hash in `hashed`.
260 260
261 261 :param password: The cleartext password.
262 262 :type password: unicode
263 263
264 264 :param hashed: The expected hashed version of the password.
265 265 :type hashed: The hash has to be passed in text representation.
266 266 """
267 267 password = safe_str(password)
268 268 return crypto_backend().hash_check(password, hashed)
269 269
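
A quick illustration of the two helpers above, run against whichever crypto backend is active (the password value is made up)::

    hashed = get_crypt_password('s3cret')
    assert check_password('s3cret', hashed)
    assert not check_password('wrong', hashed)
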
270 270
271 271 def generate_auth_token(data, salt=None):
272 272 """
273 273 Generates an API key from the given string
274 274 """
275 275
276 276 if salt is None:
277 277 salt = os.urandom(16)
278 278 return hashlib.sha1(safe_str(data) + salt).hexdigest()
279 279
280 280
281 281 def get_came_from(request):
282 282 """
283 283 get path + query_string from the request, sanitized by removing auth_token
284 284 """
285 285 _req = request
286 286
287 287 path = _req.path
288 288 if 'auth_token' in _req.GET:
289 289 # sanitize the request and remove auth_token for redirection
290 290 _req.GET.pop('auth_token')
291 291 qs = _req.query_string
292 292 if qs:
293 293 path += '?' + qs
294 294
295 295 return path
296 296
297 297
298 298 class CookieStoreWrapper(object):
299 299
300 300 def __init__(self, cookie_store):
301 301 self.cookie_store = cookie_store
302 302
303 303 def __repr__(self):
304 304 return 'CookieStore<%s>' % (self.cookie_store)
305 305
306 306 def get(self, key, other=None):
307 307 if isinstance(self.cookie_store, dict):
308 308 return self.cookie_store.get(key, other)
309 309 elif isinstance(self.cookie_store, AuthUser):
310 310 return self.cookie_store.__dict__.get(key, other)
311 311
312 312
313 313 def _cached_perms_data(user_id, scope, user_is_admin,
314 314 user_inherit_default_permissions, explicit, algo,
315 315 calculate_super_admin):
316 316
317 317 permissions = PermissionCalculator(
318 318 user_id, scope, user_is_admin, user_inherit_default_permissions,
319 319 explicit, algo, calculate_super_admin)
320 320 return permissions.calculate()
321 321
322 322
323 323 class PermOrigin(object):
324 324 SUPER_ADMIN = 'superadmin'
325 ARCHIVED = 'archived'
325 326
326 327 REPO_USER = 'user:%s'
327 328 REPO_USERGROUP = 'usergroup:%s'
328 329 REPO_OWNER = 'repo.owner'
329 330 REPO_DEFAULT = 'repo.default'
330 331 REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
331 332 REPO_PRIVATE = 'repo.private'
332 333
333 334 REPOGROUP_USER = 'user:%s'
334 335 REPOGROUP_USERGROUP = 'usergroup:%s'
335 336 REPOGROUP_OWNER = 'group.owner'
336 337 REPOGROUP_DEFAULT = 'group.default'
337 338 REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'
338 339
339 340 USERGROUP_USER = 'user:%s'
340 341 USERGROUP_USERGROUP = 'usergroup:%s'
341 342 USERGROUP_OWNER = 'usergroup.owner'
342 343 USERGROUP_DEFAULT = 'usergroup.default'
343 344 USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 345
345 346
346 347 class PermOriginDict(dict):
347 348 """
348 349 A special dict used for tracking permissions along with their origins.
349 350
350 351 `__setitem__` has been overridden to expect a tuple(perm, origin)
351 352 `__getitem__` will return only the perm
352 353 `.perm_origin_stack` will return the stack of (perm, origin) set per key
353 354
354 355 >>> perms = PermOriginDict()
355 356 >>> perms['resource'] = 'read', 'default'
356 357 >>> perms['resource']
357 358 'read'
358 359 >>> perms['resource'] = 'write', 'admin'
359 360 >>> perms['resource']
360 361 'write'
361 362 >>> perms.perm_origin_stack
362 363 {'resource': [('read', 'default'), ('write', 'admin')]}
363 364 """
364 365
365 366 def __init__(self, *args, **kw):
366 367 dict.__init__(self, *args, **kw)
367 368 self.perm_origin_stack = collections.OrderedDict()
368 369
369 370 def __setitem__(self, key, (perm, origin)):
370 371 self.perm_origin_stack.setdefault(key, []).append(
371 372 (perm, origin))
372 373 dict.__setitem__(self, key, perm)
373 374
374 375
375 376 class BranchPermOriginDict(PermOriginDict):
376 377 """
377 378 Dedicated branch permissions dict, with tracking of patterns and origins.
378 379
379 380 >>> perms = BranchPermOriginDict()
380 381 >>> perms['resource'] = '*pattern', 'read', 'default'
381 382 >>> perms['resource']
382 383 {'*pattern': 'read'}
383 384 >>> perms['resource'] = '*pattern', 'write', 'admin'
384 385 >>> perms['resource']
385 386 {'*pattern': 'write'}
386 387 >>> perms.perm_origin_stack
387 388 {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
388 389 """
389 390 def __setitem__(self, key, (pattern, perm, origin)):
390 391
391 392 self.perm_origin_stack.setdefault(key, {}) \
392 393 .setdefault(pattern, []).append((perm, origin))
393 394
394 395 if key in self:
395 396 self[key].__setitem__(pattern, perm)
396 397 else:
397 398 patterns = collections.OrderedDict()
398 399 patterns[pattern] = perm
399 400 dict.__setitem__(self, key, patterns)
400 401
401 402
402 403 class PermissionCalculator(object):
403 404
404 405 def __init__(
405 406 self, user_id, scope, user_is_admin,
406 407 user_inherit_default_permissions, explicit, algo,
407 408 calculate_super_admin_as_user=False):
408 409
409 410 self.user_id = user_id
410 411 self.user_is_admin = user_is_admin
411 412 self.inherit_default_permissions = user_inherit_default_permissions
412 413 self.explicit = explicit
413 414 self.algo = algo
414 415 self.calculate_super_admin_as_user = calculate_super_admin_as_user
415 416
416 417 scope = scope or {}
417 418 self.scope_repo_id = scope.get('repo_id')
418 419 self.scope_repo_group_id = scope.get('repo_group_id')
419 420 self.scope_user_group_id = scope.get('user_group_id')
420 421
421 422 self.default_user_id = User.get_default_user(cache=True).user_id
422 423
423 424 self.permissions_repositories = PermOriginDict()
424 425 self.permissions_repository_groups = PermOriginDict()
425 426 self.permissions_user_groups = PermOriginDict()
426 427 self.permissions_repository_branches = BranchPermOriginDict()
427 428 self.permissions_global = set()
428 429
429 430 self.default_repo_perms = Permission.get_default_repo_perms(
430 431 self.default_user_id, self.scope_repo_id)
431 432 self.default_repo_groups_perms = Permission.get_default_group_perms(
432 433 self.default_user_id, self.scope_repo_group_id)
433 434 self.default_user_group_perms = \
434 435 Permission.get_default_user_group_perms(
435 436 self.default_user_id, self.scope_user_group_id)
436 437
437 438 # default branch perms
438 439 self.default_branch_repo_perms = \
439 440 Permission.get_default_repo_branch_perms(
440 441 self.default_user_id, self.scope_repo_id)
441 442
442 443 def calculate(self):
443 444 if self.user_is_admin and not self.calculate_super_admin_as_user:
444 445 return self._calculate_admin_permissions()
445 446
446 447 self._calculate_global_default_permissions()
447 448 self._calculate_global_permissions()
448 449 self._calculate_default_permissions()
449 450 self._calculate_repository_permissions()
450 451 self._calculate_repository_branch_permissions()
451 452 self._calculate_repository_group_permissions()
452 453 self._calculate_user_group_permissions()
453 454 return self._permission_structure()
454 455
455 456 def _calculate_admin_permissions(self):
456 457 """
457 458 admin users have all default rights for repositories
458 459 and groups set to admin
459 460 """
460 461 self.permissions_global.add('hg.admin')
461 462 self.permissions_global.add('hg.create.write_on_repogroup.true')
462 463
463 464 # repositories
464 465 for perm in self.default_repo_perms:
465 466 r_k = perm.UserRepoToPerm.repository.repo_name
467 archived = perm.UserRepoToPerm.repository.archived
466 468 p = 'repository.admin'
467 469 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN
470 # special case for archived repositories, which we still block even for
471 # super admins
472 if archived:
473 p = 'repository.read'
474 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED
468 475
469 476 # repository groups
470 477 for perm in self.default_repo_groups_perms:
471 478 rg_k = perm.UserRepoGroupToPerm.group.group_name
472 479 p = 'group.admin'
473 480 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN
474 481
475 482 # user groups
476 483 for perm in self.default_user_group_perms:
477 484 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
478 485 p = 'usergroup.admin'
479 486 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN
480 487
481 488 # branch permissions
482 489 # since a super-admin can also have custom rule permissions
483 490 # we *always* need to calculate those inherited from default, and also explicit
484 491 self._calculate_default_permissions_repository_branches(
485 492 user_inherit_object_permissions=False)
486 493 self._calculate_repository_branch_permissions()
487 494
488 495 return self._permission_structure()
489 496
490 497 def _calculate_global_default_permissions(self):
491 498 """
492 499 global permissions taken from the default user
493 500 """
494 501 default_global_perms = UserToPerm.query()\
495 502 .filter(UserToPerm.user_id == self.default_user_id)\
496 503 .options(joinedload(UserToPerm.permission))
497 504
498 505 for perm in default_global_perms:
499 506 self.permissions_global.add(perm.permission.permission_name)
500 507
501 508 if self.user_is_admin:
502 509 self.permissions_global.add('hg.admin')
503 510 self.permissions_global.add('hg.create.write_on_repogroup.true')
504 511
505 512 def _calculate_global_permissions(self):
506 513 """
507 514 Set global system permissions with user permissions or permissions
508 515 taken from the user groups of the current user.
509 516
510 517 The permissions include repo creating, repo group creating, forking
511 518 etc.
512 519 """
513 520
514 521 # now we read the defined permissions and overwrite what we have set
515 522 # before those can be configured from groups or users explicitly.
516 523
517 524 # In case we want to extend this list we should make sure
518 525 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
519 526 _configurable = frozenset([
520 527 'hg.fork.none', 'hg.fork.repository',
521 528 'hg.create.none', 'hg.create.repository',
522 529 'hg.usergroup.create.false', 'hg.usergroup.create.true',
523 530 'hg.repogroup.create.false', 'hg.repogroup.create.true',
524 531 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
525 532 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
526 533 ])
527 534
528 535 # USER GROUPS comes first user group global permissions
529 536 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
530 537 .options(joinedload(UserGroupToPerm.permission))\
531 538 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
532 539 UserGroupMember.users_group_id))\
533 540 .filter(UserGroupMember.user_id == self.user_id)\
534 541 .order_by(UserGroupToPerm.users_group_id)\
535 542 .all()
536 543
537 544 # need to group here by groups since user can be in more than
538 545 # one group, so we get all groups
539 546 _explicit_grouped_perms = [
540 547 [x, list(y)] for x, y in
541 548 itertools.groupby(user_perms_from_users_groups,
542 549 lambda _x: _x.users_group)]
543 550
544 551 for gr, perms in _explicit_grouped_perms:
545 552 # since user can be in multiple groups iterate over them and
546 553 # select the lowest permissions first (more explicit)
547 554 # TODO(marcink): do this^^
548 555
549 556 # group doesn't inherit default permissions so we actually set them
550 557 if not gr.inherit_default_permissions:
551 558 # NEED TO IGNORE all previously set configurable permissions
552 559 # and replace them with explicitly set from this user
553 560 # group permissions
554 561 self.permissions_global = self.permissions_global.difference(
555 562 _configurable)
556 563 for perm in perms:
557 564 self.permissions_global.add(perm.permission.permission_name)
558 565
559 566 # user explicit global permissions
560 567 user_perms = Session().query(UserToPerm)\
561 568 .options(joinedload(UserToPerm.permission))\
562 569 .filter(UserToPerm.user_id == self.user_id).all()
563 570
564 571 if not self.inherit_default_permissions:
565 572 # NEED TO IGNORE all configurable permissions and
566 573 # replace them with explicitly set from this user permissions
567 574 self.permissions_global = self.permissions_global.difference(
568 575 _configurable)
569 576 for perm in user_perms:
570 577 self.permissions_global.add(perm.permission.permission_name)
571 578
572 579 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
573 580 for perm in self.default_repo_perms:
574 581 r_k = perm.UserRepoToPerm.repository.repo_name
582 archived = perm.UserRepoToPerm.repository.archived
575 583 p = perm.Permission.permission_name
576 584 o = PermOrigin.REPO_DEFAULT
577 585 self.permissions_repositories[r_k] = p, o
578 586
579 587 # if we decide this user isn't inheriting permissions from
580 588 # default user we set him to .none so only explicit
581 589 # permissions work
582 590 if not user_inherit_object_permissions:
583 591 p = 'repository.none'
584 592 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
585 593 self.permissions_repositories[r_k] = p, o
586 594
587 595 if perm.Repository.private and not (
588 596 perm.Repository.user_id == self.user_id):
589 597 # disable defaults for private repos,
590 598 p = 'repository.none'
591 599 o = PermOrigin.REPO_PRIVATE
592 600 self.permissions_repositories[r_k] = p, o
593 601
594 602 elif perm.Repository.user_id == self.user_id:
595 603 # set admin if owner
596 604 p = 'repository.admin'
597 605 o = PermOrigin.REPO_OWNER
598 606 self.permissions_repositories[r_k] = p, o
599 607
600 608 if self.user_is_admin:
601 609 p = 'repository.admin'
602 610 o = PermOrigin.SUPER_ADMIN
603 611 self.permissions_repositories[r_k] = p, o
604 612
613 # finally in case of archived repositories, we downgrade higher
614 # permissions to read
615 if archived:
616 current_perm = self.permissions_repositories[r_k]
617 if current_perm in ['repository.write', 'repository.admin']:
618 p = 'repository.read'
619 o = PermOrigin.ARCHIVED
620 self.permissions_repositories[r_k] = p, o
621
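# --- editor's illustrative sketch (not part of this changeset) ---
# A minimal, standalone model of the archived-repository rule above: whatever
# permission was computed, archived repositories are capped at
# 'repository.read'. `cap_archived_permission` is a hypothetical helper used
# only for illustration, not a RhodeCode API.
def cap_archived_permission(current_perm, archived):
    # archived repos never keep write/admin rights
    if archived and current_perm in ('repository.write', 'repository.admin'):
        return 'repository.read'
    return current_perm

assert cap_archived_permission('repository.admin', True) == 'repository.read'
assert cap_archived_permission('repository.read', True) == 'repository.read'
assert cap_archived_permission('repository.admin', False) == 'repository.admin'
# --- end of editor's sketch ---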
605 622 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
606 623 for perm in self.default_branch_repo_perms:
607 624
608 625 r_k = perm.UserRepoToPerm.repository.repo_name
609 626 p = perm.Permission.permission_name
610 627 pattern = perm.UserToRepoBranchPermission.branch_pattern
611 628 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
612 629
613 630 if not self.explicit:
614 631 # TODO(marcink): fix this for multiple entries
615 632 cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
616 633 p = self._choose_permission(p, cur_perm)
617 634
618 635 # NOTE(marcink): register all pattern/perm instances in this
619 636 # special dict that aggregates entries
620 637 self.permissions_repository_branches[r_k] = pattern, p, o
621 638
622 639 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
623 640 for perm in self.default_repo_groups_perms:
624 641 rg_k = perm.UserRepoGroupToPerm.group.group_name
625 642 p = perm.Permission.permission_name
626 643 o = PermOrigin.REPOGROUP_DEFAULT
627 644 self.permissions_repository_groups[rg_k] = p, o
628 645
629 646 # if we decide this user isn't inheriting permissions from default
630 647 # user we set him to .none so only explicit permissions work
631 648 if not user_inherit_object_permissions:
632 649 p = 'group.none'
633 650 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
634 651 self.permissions_repository_groups[rg_k] = p, o
635 652
636 653 if perm.RepoGroup.user_id == self.user_id:
637 654 # set admin if owner
638 655 p = 'group.admin'
639 656 o = PermOrigin.REPOGROUP_OWNER
640 657 self.permissions_repository_groups[rg_k] = p, o
641 658
642 659 if self.user_is_admin:
643 660 p = 'group.admin'
644 661 o = PermOrigin.SUPER_ADMIN
645 662 self.permissions_repository_groups[rg_k] = p, o
646 663
647 664 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
648 665 for perm in self.default_user_group_perms:
649 666 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
650 667 p = perm.Permission.permission_name
651 668 o = PermOrigin.USERGROUP_DEFAULT
652 669 self.permissions_user_groups[u_k] = p, o
653 670
654 671 # if we decide this user isn't inheriting permissions from default
655 672 # user we set him to .none so only explicit permissions work
656 673 if not user_inherit_object_permissions:
657 674 p = 'usergroup.none'
658 675 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
659 676 self.permissions_user_groups[u_k] = p, o
660 677
661 678 if perm.UserGroup.user_id == self.user_id:
662 679 # set admin if owner
663 680 p = 'usergroup.admin'
664 681 o = PermOrigin.USERGROUP_OWNER
665 682 self.permissions_user_groups[u_k] = p, o
666 683
667 684 if self.user_is_admin:
668 685 p = 'usergroup.admin'
669 686 o = PermOrigin.SUPER_ADMIN
670 687 self.permissions_user_groups[u_k] = p, o
671 688
672 689 def _calculate_default_permissions(self):
673 690 """
674 691 Set default user permissions for repositories, repository branches,
675 692 repository groups, user groups taken from the default user.
676 693
677 694 Calculate inheritance of object permissions based on what we have now
678 695 in GLOBAL permissions. We check if .false is in GLOBAL since this is
679 696 explicitly set. Inherit is the opposite of .false being there.
680 697
681 698 .. note::
682 699
683 700 the syntax is a little bit odd, but what we need to check here is
684 701 the opposite of the .false permission being in the list, so even in an
685 702 inconsistent state when both .true and .false are present,
686 703 .false is more important
687 704
688 705 """
689 706 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
690 707 in self.permissions_global)
691 708
692 709 # default permissions inherited from `default` user permissions
693 710 self._calculate_default_permissions_repositories(
694 711 user_inherit_object_permissions)
695 712
696 713 self._calculate_default_permissions_repository_branches(
697 714 user_inherit_object_permissions)
698 715
699 716 self._calculate_default_permissions_repository_groups(
700 717 user_inherit_object_permissions)
701 718
702 719 self._calculate_default_permissions_user_groups(
703 720 user_inherit_object_permissions)
704 721
705 722 def _calculate_repository_permissions(self):
706 723 """
707 724 Repository permissions for the current user.
708 725
709 726 Check if the user is part of user groups for this repository and
710 727 fill in the permission from them. `_choose_permission` decides which
711 728 permission should be selected based on the selected method.
712 729 """
713 730
714 731 # user group for repositories permissions
715 732 user_repo_perms_from_user_group = Permission\
716 733 .get_default_repo_perms_from_user_group(
717 734 self.user_id, self.scope_repo_id)
718 735
719 736 multiple_counter = collections.defaultdict(int)
720 737 for perm in user_repo_perms_from_user_group:
721 738 r_k = perm.UserGroupRepoToPerm.repository.repo_name
722 739 multiple_counter[r_k] += 1
723 740 p = perm.Permission.permission_name
724 741 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
725 742 .users_group.users_group_name
726 743
727 744 if multiple_counter[r_k] > 1:
728 745 cur_perm = self.permissions_repositories[r_k]
729 746 p = self._choose_permission(p, cur_perm)
730 747
731 748 self.permissions_repositories[r_k] = p, o
732 749
733 750 if perm.Repository.user_id == self.user_id:
734 751 # set admin if owner
735 752 p = 'repository.admin'
736 753 o = PermOrigin.REPO_OWNER
737 754 self.permissions_repositories[r_k] = p, o
738 755
739 756 if self.user_is_admin:
740 757 p = 'repository.admin'
741 758 o = PermOrigin.SUPER_ADMIN
742 759 self.permissions_repositories[r_k] = p, o
743 760
744 761 # user explicit permissions for repositories, overrides any specified
745 762 # by the group permission
746 763 user_repo_perms = Permission.get_default_repo_perms(
747 764 self.user_id, self.scope_repo_id)
748 765 for perm in user_repo_perms:
749 766 r_k = perm.UserRepoToPerm.repository.repo_name
750 767 p = perm.Permission.permission_name
751 768 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
752 769
753 770 if not self.explicit:
754 771 cur_perm = self.permissions_repositories.get(
755 772 r_k, 'repository.none')
756 773 p = self._choose_permission(p, cur_perm)
757 774
758 775 self.permissions_repositories[r_k] = p, o
759 776
760 777 if perm.Repository.user_id == self.user_id:
761 778 # set admin if owner
762 779 p = 'repository.admin'
763 780 o = PermOrigin.REPO_OWNER
764 781 self.permissions_repositories[r_k] = p, o
765 782
766 783 if self.user_is_admin:
767 784 p = 'repository.admin'
768 785 o = PermOrigin.SUPER_ADMIN
769 786 self.permissions_repositories[r_k] = p, o
770 787
771 788 def _calculate_repository_branch_permissions(self):
772 789 # user group for repositories permissions
773 790 user_repo_branch_perms_from_user_group = Permission\
774 791 .get_default_repo_branch_perms_from_user_group(
775 792 self.user_id, self.scope_repo_id)
776 793
777 794 multiple_counter = collections.defaultdict(int)
778 795 for perm in user_repo_branch_perms_from_user_group:
779 796 r_k = perm.UserGroupRepoToPerm.repository.repo_name
780 797 p = perm.Permission.permission_name
781 798 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
782 799 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
783 800 .users_group.users_group_name
784 801
785 802 multiple_counter[r_k] += 1
786 803 if multiple_counter[r_k] > 1:
787 804 # TODO(marcink): fix this for multi branch support, and multiple entries
788 805 cur_perm = self.permissions_repository_branches[r_k]
789 806 p = self._choose_permission(p, cur_perm)
790 807
791 808 self.permissions_repository_branches[r_k] = pattern, p, o
792 809
793 810 # user explicit branch permissions for repositories, overrides
794 811 # any specified by the group permission
795 812 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
796 813 self.user_id, self.scope_repo_id)
797 814
798 815 for perm in user_repo_branch_perms:
799 816
800 817 r_k = perm.UserRepoToPerm.repository.repo_name
801 818 p = perm.Permission.permission_name
802 819 pattern = perm.UserToRepoBranchPermission.branch_pattern
803 820 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
804 821
805 822 if not self.explicit:
806 823 # TODO(marcink): fix this for multiple entries
807 824 cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
808 825 p = self._choose_permission(p, cur_perm)
809 826
810 827 # NOTE(marcink): register all pattern/perm instances in this
811 828 # special dict that aggregates entries
812 829 self.permissions_repository_branches[r_k] = pattern, p, o
813 830
814 831 def _calculate_repository_group_permissions(self):
815 832 """
816 833 Repository group permissions for the current user.
817 834
818 835 Check if the user is part of user groups for repository groups and
819 836 fill in the permissions from them. `_choose_permission` decides which
820 837 permission should be selected based on the selected method.
821 838 """
822 839 # user group for repo groups permissions
823 840 user_repo_group_perms_from_user_group = Permission\
824 841 .get_default_group_perms_from_user_group(
825 842 self.user_id, self.scope_repo_group_id)
826 843
827 844 multiple_counter = collections.defaultdict(int)
828 845 for perm in user_repo_group_perms_from_user_group:
829 846 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
830 847 multiple_counter[rg_k] += 1
831 848 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
832 849 .users_group.users_group_name
833 850 p = perm.Permission.permission_name
834 851
835 852 if multiple_counter[rg_k] > 1:
836 853 cur_perm = self.permissions_repository_groups[rg_k]
837 854 p = self._choose_permission(p, cur_perm)
838 855 self.permissions_repository_groups[rg_k] = p, o
839 856
840 857 if perm.RepoGroup.user_id == self.user_id:
841 858 # set admin if owner, even for member of other user group
842 859 p = 'group.admin'
843 860 o = PermOrigin.REPOGROUP_OWNER
844 861 self.permissions_repository_groups[rg_k] = p, o
845 862
846 863 if self.user_is_admin:
847 864 p = 'group.admin'
848 865 o = PermOrigin.SUPER_ADMIN
849 866 self.permissions_repository_groups[rg_k] = p, o
850 867
851 868 # user explicit permissions for repository groups
852 869 user_repo_groups_perms = Permission.get_default_group_perms(
853 870 self.user_id, self.scope_repo_group_id)
854 871 for perm in user_repo_groups_perms:
855 872 rg_k = perm.UserRepoGroupToPerm.group.group_name
856 873 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
857 874 .user.username
858 875 p = perm.Permission.permission_name
859 876
860 877 if not self.explicit:
861 878 cur_perm = self.permissions_repository_groups.get(
862 879 rg_k, 'group.none')
863 880 p = self._choose_permission(p, cur_perm)
864 881
865 882 self.permissions_repository_groups[rg_k] = p, o
866 883
867 884 if perm.RepoGroup.user_id == self.user_id:
868 885 # set admin if owner
869 886 p = 'group.admin'
870 887 o = PermOrigin.REPOGROUP_OWNER
871 888 self.permissions_repository_groups[rg_k] = p, o
872 889
873 890 if self.user_is_admin:
874 891 p = 'group.admin'
875 892 o = PermOrigin.SUPER_ADMIN
876 893 self.permissions_repository_groups[rg_k] = p, o
877 894
878 895 def _calculate_user_group_permissions(self):
879 896 """
880 897 User group permissions for the current user.
881 898 """
882 899 # user group for user group permissions
883 900 user_group_from_user_group = Permission\
884 901 .get_default_user_group_perms_from_user_group(
885 902 self.user_id, self.scope_user_group_id)
886 903
887 904 multiple_counter = collections.defaultdict(int)
888 905 for perm in user_group_from_user_group:
889 906 ug_k = perm.UserGroupUserGroupToPerm\
890 907 .target_user_group.users_group_name
891 908 multiple_counter[ug_k] += 1
892 909 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
893 910 .user_group.users_group_name
894 911 p = perm.Permission.permission_name
895 912
896 913 if multiple_counter[ug_k] > 1:
897 914 cur_perm = self.permissions_user_groups[ug_k]
898 915 p = self._choose_permission(p, cur_perm)
899 916
900 917 self.permissions_user_groups[ug_k] = p, o
901 918
902 919 if perm.UserGroup.user_id == self.user_id:
903 920 # set admin if owner, even for member of other user group
904 921 p = 'usergroup.admin'
905 922 o = PermOrigin.USERGROUP_OWNER
906 923 self.permissions_user_groups[ug_k] = p, o
907 924
908 925 if self.user_is_admin:
909 926 p = 'usergroup.admin'
910 927 o = PermOrigin.SUPER_ADMIN
911 928 self.permissions_user_groups[ug_k] = p, o
912 929
913 930 # user explicit permission for user groups
914 931 user_user_groups_perms = Permission.get_default_user_group_perms(
915 932 self.user_id, self.scope_user_group_id)
916 933 for perm in user_user_groups_perms:
917 934 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
918 935 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
919 936 .user.username
920 937 p = perm.Permission.permission_name
921 938
922 939 if not self.explicit:
923 940 cur_perm = self.permissions_user_groups.get(
924 941 ug_k, 'usergroup.none')
925 942 p = self._choose_permission(p, cur_perm)
926 943
927 944 self.permissions_user_groups[ug_k] = p, o
928 945
929 946 if perm.UserGroup.user_id == self.user_id:
930 947 # set admin if owner
931 948 p = 'usergroup.admin'
932 949 o = PermOrigin.USERGROUP_OWNER
933 950 self.permissions_user_groups[ug_k] = p, o
934 951
935 952 if self.user_is_admin:
936 953 p = 'usergroup.admin'
937 954 o = PermOrigin.SUPER_ADMIN
938 955 self.permissions_user_groups[ug_k] = p, o
939 956
940 957 def _choose_permission(self, new_perm, cur_perm):
941 958 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
942 959 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
943 960 if self.algo == 'higherwin':
944 961 if new_perm_val > cur_perm_val:
945 962 return new_perm
946 963 return cur_perm
947 964 elif self.algo == 'lowerwin':
948 965 if new_perm_val < cur_perm_val:
949 966 return new_perm
950 967 return cur_perm
951 968
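# --- editor's illustrative sketch (not part of this changeset) ---
# How the 'higherwin' / 'lowerwin' algorithms above resolve conflicting
# permissions. The weights below are hypothetical stand-ins for
# Permission.PERM_WEIGHTS, shown only to make the ordering concrete.
_WEIGHTS = {'repository.none': 0, 'repository.read': 1,
            'repository.write': 3, 'repository.admin': 4}

def choose_permission(new_perm, cur_perm, algo='higherwin'):
    new_val, cur_val = _WEIGHTS[new_perm], _WEIGHTS[cur_perm]
    if algo == 'higherwin':
        return new_perm if new_val > cur_val else cur_perm
    # 'lowerwin'
    return new_perm if new_val < cur_val else cur_perm

assert choose_permission('repository.write', 'repository.read') == 'repository.write'
assert choose_permission('repository.write', 'repository.read', 'lowerwin') == 'repository.read'
# --- end of editor's sketch ---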
952 969 def _permission_structure(self):
953 970 return {
954 971 'global': self.permissions_global,
955 972 'repositories': self.permissions_repositories,
956 973 'repository_branches': self.permissions_repository_branches,
957 974 'repositories_groups': self.permissions_repository_groups,
958 975 'user_groups': self.permissions_user_groups,
959 976 }
960 977
961 978
962 979 def allowed_auth_token_access(view_name, auth_token, whitelist=None):
963 980 """
964 981 Check if given controller_name is in whitelist of auth token access
965 982 """
966 983 if not whitelist:
967 984 from rhodecode import CONFIG
968 985 whitelist = aslist(
969 986 CONFIG.get('api_access_controllers_whitelist'), sep=',')
970 987 # backward compat translation
971 988 compat = {
972 989 # old controller, new VIEW
973 990 'ChangesetController:*': 'RepoCommitsView:*',
974 991 'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
975 992 'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
976 993 'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
977 994 'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
978 995 'GistsController:*': 'GistView:*',
979 996 }
980 997
981 998 log.debug(
982 999 'Allowed views for AUTH TOKEN access: %s', whitelist)
983 1000 auth_token_access_valid = False
984 1001
985 1002 for entry in whitelist:
986 1003 token_match = True
987 1004 if entry in compat:
988 1005 # translate from old Controllers to Pyramid Views
989 1006 entry = compat[entry]
990 1007
991 1008 if '@' in entry:
992 1009 # specific AuthToken
993 1010 entry, allowed_token = entry.split('@', 1)
994 1011 token_match = auth_token == allowed_token
995 1012
996 1013 if fnmatch.fnmatch(view_name, entry) and token_match:
997 1014 auth_token_access_valid = True
998 1015 break
999 1016
1000 1017 if auth_token_access_valid:
1001 1018 log.debug('view: `%s` matches entry in whitelist: %s',
1002 1019 view_name, whitelist)
1003 1020
1004 1021 else:
1005 1022 msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
1006 1023 % (view_name, whitelist))
1007 1024 if auth_token:
1008 1025 # if we use auth token key and don't have access it's a warning
1009 1026 log.warning(msg)
1010 1027 else:
1011 1028 log.debug(msg)
1012 1029
1013 1030 return auth_token_access_valid
1014 1031
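# --- editor's illustrative sketch (not part of this changeset) ---
# The whitelist entries above support fnmatch patterns plus an optional
# '@<token>' suffix that pins an entry to one specific auth token. A
# standalone re-creation of that matching logic; `entry_matches` and the
# view/token names are hypothetical, used only for illustration.
import fnmatch

def entry_matches(view_name, auth_token, entry):
    token_match = True
    if '@' in entry:
        entry, allowed_token = entry.split('@', 1)
        token_match = (auth_token == allowed_token)
    return fnmatch.fnmatch(view_name, entry) and token_match

assert entry_matches('RepoCommitsView:repo_commit_raw', 'abc', 'RepoCommitsView:*')
assert entry_matches('GistView:show', 'secret', 'GistView:*@secret')
assert not entry_matches('GistView:show', 'wrong', 'GistView:*@secret')
# --- end of editor's sketch ---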
1015 1032
1016 1033 class AuthUser(object):
1017 1034 """
1018 1035 A simple object that handles all attributes of user in RhodeCode
1019 1036
1020 1037 It does a lookup based on API key, given user, or user present in the session.
1021 1038 Then it fills in all required information for such a user. It also checks if
1022 1039 anonymous access is enabled and, if so, returns the default user as logged in.
1023 1040 """
1024 1041 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1025 1042
1026 1043 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
1027 1044
1028 1045 self.user_id = user_id
1029 1046 self._api_key = api_key
1030 1047
1031 1048 self.api_key = None
1032 1049 self.username = username
1033 1050 self.ip_addr = ip_addr
1034 1051 self.name = ''
1035 1052 self.lastname = ''
1036 1053 self.first_name = ''
1037 1054 self.last_name = ''
1038 1055 self.email = ''
1039 1056 self.is_authenticated = False
1040 1057 self.admin = False
1041 1058 self.inherit_default_permissions = False
1042 1059 self.password = ''
1043 1060
1044 1061 self.anonymous_user = None # propagated on propagate_data
1045 1062 self.propagate_data()
1046 1063 self._instance = None
1047 1064 self._permissions_scoped_cache = {} # used to bind scoped calculation
1048 1065
1049 1066 @LazyProperty
1050 1067 def permissions(self):
1051 1068 return self.get_perms(user=self, cache=None)
1052 1069
1053 1070 @LazyProperty
1054 1071 def permissions_safe(self):
1055 1072 """
1056 1073 Filtered permissions, excluding objects with `.none` permissions
1057 1074 """
1058 1075 perms = self.get_perms(user=self, cache=None)
1059 1076
1060 1077 perms['repositories'] = {
1061 1078 k: v for k, v in perms['repositories'].items()
1062 1079 if v != 'repository.none'}
1063 1080 perms['repositories_groups'] = {
1064 1081 k: v for k, v in perms['repositories_groups'].items()
1065 1082 if v != 'group.none'}
1066 1083 perms['user_groups'] = {
1067 1084 k: v for k, v in perms['user_groups'].items()
1068 1085 if v != 'usergroup.none'}
1069 1086 perms['repository_branches'] = {
1070 1087 k: v for k, v in perms['repository_branches'].iteritems()
1071 1088 if v != 'branch.none'}
1072 1089 return perms
1073 1090
1074 1091 @LazyProperty
1075 1092 def permissions_full_details(self):
1076 1093 return self.get_perms(
1077 1094 user=self, cache=None, calculate_super_admin=True)
1078 1095
1079 1096 def permissions_with_scope(self, scope):
1080 1097 """
1081 1098 Call the get_perms function with scoped data. The scope in that function
1082 1099 narrows the SQL calls to the given IDs of objects, resulting in fetching
1083 1100 just the particular permissions we want to obtain. If scope is an empty dict
1084 1101 then it basically narrows the scope to GLOBAL permissions only.
1085 1102
1086 1103 :param scope: dict
1087 1104 """
1088 1105 if 'repo_name' in scope:
1089 1106 obj = Repository.get_by_repo_name(scope['repo_name'])
1090 1107 if obj:
1091 1108 scope['repo_id'] = obj.repo_id
1092 1109 _scope = collections.OrderedDict()
1093 1110 _scope['repo_id'] = -1
1094 1111 _scope['user_group_id'] = -1
1095 1112 _scope['repo_group_id'] = -1
1096 1113
1097 1114 for k in sorted(scope.keys()):
1098 1115 _scope[k] = scope[k]
1099 1116
1100 1117 # store in cache to mimic how the @LazyProperty works,
1101 1118 # the difference here is that we use the unique key calculated
1102 1119 # from params and values
1103 1120 return self.get_perms(user=self, cache=None, scope=_scope)
1104 1121
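# --- editor's illustrative sketch (not part of this changeset) ---
# permissions_with_scope() above normalises the scope into a fixed key order
# so the computed cache key stays stable regardless of the caller's dict
# ordering. A standalone re-creation of that normalisation step:
import collections

def normalise_scope(scope):
    _scope = collections.OrderedDict()
    _scope['repo_id'] = -1
    _scope['user_group_id'] = -1
    _scope['repo_group_id'] = -1
    for k in sorted(scope.keys()):
        _scope[k] = scope[k]
    return _scope

assert list(normalise_scope({'repo_id': 42}).items()) == [
    ('repo_id', 42), ('user_group_id', -1), ('repo_group_id', -1)]
# --- end of editor's sketch ---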
1105 1122 def get_instance(self):
1106 1123 return User.get(self.user_id)
1107 1124
1108 1125 def propagate_data(self):
1109 1126 """
1110 1127 Fills in user data and propagates values to this instance. Maps fetched
1111 1128 user attributes to this class instance attributes
1112 1129 """
1113 1130 log.debug('AuthUser: starting data propagation for new potential user')
1114 1131 user_model = UserModel()
1115 1132 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1116 1133 is_user_loaded = False
1117 1134
1118 1135 # lookup by userid
1119 1136 if self.user_id is not None and self.user_id != anon_user.user_id:
1120 1137 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1121 1138 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1122 1139
1123 1140 # try to get user by api key
1124 1141 elif self._api_key and self._api_key != anon_user.api_key:
1125 1142 log.debug('Trying Auth User lookup by API KEY: `%s`', self._api_key)
1126 1143 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1127 1144
1128 1145 # lookup by username
1129 1146 elif self.username:
1130 1147 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1131 1148 is_user_loaded = user_model.fill_data(self, username=self.username)
1132 1149 else:
1133 1150 log.debug('No data in %s that could have been used to log in', self)
1134 1151
1135 1152 if not is_user_loaded:
1136 1153 log.debug(
1137 1154 'Failed to load user. Fallback to default user %s', anon_user)
1138 1155 # if we cannot authenticate user try anonymous
1139 1156 if anon_user.active:
1140 1157 log.debug('default user is active, using it as a session user')
1141 1158 user_model.fill_data(self, user_id=anon_user.user_id)
1142 1159 # then we set this user is logged in
1143 1160 self.is_authenticated = True
1144 1161 else:
1145 1162 log.debug('default user is NOT active')
1146 1163 # in case of disabled anonymous user we reset some of the
1147 1164 # parameters so such user is "corrupted", skipping the fill_data
1148 1165 for attr in ['user_id', 'username', 'admin', 'active']:
1149 1166 setattr(self, attr, None)
1150 1167 self.is_authenticated = False
1151 1168
1152 1169 if not self.username:
1153 1170 self.username = 'None'
1154 1171
1155 1172 log.debug('AuthUser: propagated user is now %s', self)
1156 1173
1157 1174 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1158 1175 calculate_super_admin=False, cache=None):
1159 1176 """
1160 1177 Fills user permission attribute with permissions taken from database
1161 1178 works for permissions given for repositories, and for permissions that
1162 1179 are granted to groups
1163 1180
1164 1181 :param user: instance of User object from database
1165 1182 :param explicit: In case there are permissions both for user and a group
1166 1183 that the user is part of, the explicit flag defines whether the user
1167 1184 explicitly overrides permissions from the group; if it's False the
1168 1185 decision is made based on the algo
1169 1186 :param algo: algorithm to decide which permission should be chosen if
1170 1187 multiple are defined, e.g. user in two different groups. It also
1171 1188 decides, if the explicit flag is turned off, how to pick the permission
1172 1189 when the user is in a group and also has a separate permission defined
1173 1190 :param calculate_super_admin: calculate permissions for super-admin in the
1174 1191 same way as for regular user without speedups
1175 1192 :param cache: Use caching for calculation, None = let the cache backend decide
1176 1193 """
1177 1194 user_id = user.user_id
1178 1195 user_is_admin = user.is_admin
1179 1196
1180 1197 # inheritance of global permissions like create repo/fork repo etc
1181 1198 user_inherit_default_permissions = user.inherit_default_permissions
1182 1199
1183 1200 cache_seconds = safe_int(
1184 1201 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1185 1202
1186 1203 if cache is None:
1187 1204 # let the backend cache decide
1188 1205 cache_on = cache_seconds > 0
1189 1206 else:
1190 1207 cache_on = cache
1191 1208
1192 1209 log.debug(
1193 1210 'Computing PERMISSION tree for user %s scope `%s` '
1194 1211 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)
1195 1212
1196 1213 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
1197 1214 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1198 1215
1199 1216 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1200 1217 condition=cache_on)
1201 1218 def compute_perm_tree(cache_name,
1202 1219 user_id, scope, user_is_admin,user_inherit_default_permissions,
1203 1220 explicit, algo, calculate_super_admin):
1204 1221 return _cached_perms_data(
1205 1222 user_id, scope, user_is_admin, user_inherit_default_permissions,
1206 1223 explicit, algo, calculate_super_admin)
1207 1224
1208 1225 start = time.time()
1209 1226 result = compute_perm_tree(
1210 1227 'permissions', user_id, scope, user_is_admin,
1211 1228 user_inherit_default_permissions, explicit, algo,
1212 1229 calculate_super_admin)
1213 1230
1214 1231 result_repr = []
1215 1232 for k in result:
1216 1233 result_repr.append((k, len(result[k])))
1217 1234 total = time.time() - start
1218 1235 log.debug('PERMISSION tree for user %s computed in %.3fs: %s',
1219 1236 user, total, result_repr)
1220 1237
1221 1238 return result
1222 1239
1223 1240 @property
1224 1241 def is_default(self):
1225 1242 return self.username == User.DEFAULT_USER
1226 1243
1227 1244 @property
1228 1245 def is_admin(self):
1229 1246 return self.admin
1230 1247
1231 1248 @property
1232 1249 def is_user_object(self):
1233 1250 return self.user_id is not None
1234 1251
1235 1252 @property
1236 1253 def repositories_admin(self):
1237 1254 """
1238 1255 Returns list of repositories you're an admin of
1239 1256 """
1240 1257 return [
1241 1258 x[0] for x in self.permissions['repositories'].items()
1242 1259 if x[1] == 'repository.admin']
1243 1260
1244 1261 @property
1245 1262 def repository_groups_admin(self):
1246 1263 """
1247 1264 Returns list of repository groups you're an admin of
1248 1265 """
1249 1266 return [
1250 1267 x[0] for x in self.permissions['repositories_groups'].items()
1251 1268 if x[1] == 'group.admin']
1252 1269
1253 1270 @property
1254 1271 def user_groups_admin(self):
1255 1272 """
1256 1273 Returns list of user groups you're an admin of
1257 1274 """
1258 1275 return [
1259 1276 x[0] for x in self.permissions['user_groups'].items()
1260 1277 if x[1] == 'usergroup.admin']
1261 1278
1262 1279 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1263 1280 """
1264 1281 Returns list of repository ids that the user has access to, based on given
1265 1282 perms. The cache flag should only be used in cases that are used for
1266 1283 display purposes, NOT IN ANY CASE for permission checks.
1267 1284 """
1268 1285 from rhodecode.model.scm import RepoList
1269 1286 if not perms:
1270 1287 perms = [
1271 1288 'repository.read', 'repository.write', 'repository.admin']
1272 1289
1273 1290 def _cached_repo_acl(user_id, perm_def, _name_filter):
1274 1291 qry = Repository.query()
1275 1292 if _name_filter:
1276 1293 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1277 1294 qry = qry.filter(
1278 1295 Repository.repo_name.ilike(ilike_expression))
1279 1296
1280 1297 return [x.repo_id for x in
1281 1298 RepoList(qry, perm_set=perm_def)]
1282 1299
1283 1300 return _cached_repo_acl(self.user_id, perms, name_filter)
1284 1301
1285 1302 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1286 1303 """
1287 1304 Returns list of repository group ids that the user has access to, based on given
1288 1305 perms. The cache flag should only be used in cases that are used for
1289 1306 display purposes, NOT IN ANY CASE for permission checks.
1290 1307 """
1291 1308 from rhodecode.model.scm import RepoGroupList
1292 1309 if not perms:
1293 1310 perms = [
1294 1311 'group.read', 'group.write', 'group.admin']
1295 1312
1296 1313 def _cached_repo_group_acl(user_id, perm_def, _name_filter):
1297 1314 qry = RepoGroup.query()
1298 1315 if _name_filter:
1299 1316 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1300 1317 qry = qry.filter(
1301 1318 RepoGroup.group_name.ilike(ilike_expression))
1302 1319
1303 1320 return [x.group_id for x in
1304 1321 RepoGroupList(qry, perm_set=perm_def)]
1305 1322
1306 1323 return _cached_repo_group_acl(self.user_id, perms, name_filter)
1307 1324
1308 1325 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1309 1326 """
1310 1327 Returns list of user group ids that the user has access to, based on given
1311 1328 perms. The cache flag should only be used in cases that are used for
1312 1329 display purposes, NOT IN ANY CASE for permission checks.
1313 1330 """
1314 1331 from rhodecode.model.scm import UserGroupList
1315 1332 if not perms:
1316 1333 perms = [
1317 1334 'usergroup.read', 'usergroup.write', 'usergroup.admin']
1318 1335
1319 1336 def _cached_user_group_acl(user_id, perm_def, name_filter):
1320 1337 qry = UserGroup.query()
1321 1338 if name_filter:
1322 1339 ilike_expression = u'%{}%'.format(safe_unicode(name_filter))
1323 1340 qry = qry.filter(
1324 1341 UserGroup.users_group_name.ilike(ilike_expression))
1325 1342
1326 1343 return [x.users_group_id for x in
1327 1344 UserGroupList(qry, perm_set=perm_def)]
1328 1345
1329 1346 return _cached_user_group_acl(self.user_id, perms, name_filter)
1330 1347
1331 1348 @property
1332 1349 def ip_allowed(self):
1333 1350 """
1334 1351 Checks if the ip_addr used in the constructor is within the defined list of
1335 1352 allowed ip_addresses for the user
1336 1353
1337 1354 :returns: boolean, True if ip is in allowed ip range
1338 1355 """
1339 1356 # check IP
1340 1357 inherit = self.inherit_default_permissions
1341 1358 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1342 1359 inherit_from_default=inherit)
1343 1360 @property
1344 1361 def personal_repo_group(self):
1345 1362 return RepoGroup.get_user_personal_repo_group(self.user_id)
1346 1363
1347 1364 @LazyProperty
1348 1365 def feed_token(self):
1349 1366 return self.get_instance().feed_token
1350 1367
1351 1368 @classmethod
1352 1369 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1353 1370 allowed_ips = AuthUser.get_allowed_ips(
1354 1371 user_id, cache=True, inherit_from_default=inherit_from_default)
1355 1372 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1356 1373 log.debug('IP:%s for user %s is in range of %s',
1357 1374 ip_addr, user_id, allowed_ips)
1358 1375 return True
1359 1376 else:
1360 1377 log.info('Access for IP:%s forbidden for user %s, '
1361 1378 'not in %s', ip_addr, user_id, allowed_ips)
1362 1379 return False
1363 1380
1364 1381 def get_branch_permissions(self, repo_name, perms=None):
1365 1382 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1366 1383 branch_perms = perms.get('repository_branches', {})
1367 1384 if not branch_perms:
1368 1385 return {}
1369 1386 repo_branch_perms = branch_perms.get(repo_name)
1370 1387 return repo_branch_perms or {}
1371 1388
1372 1389 def get_rule_and_branch_permission(self, repo_name, branch_name):
1373 1390 """
1374 1391 Check if this AuthUser has defined any permissions for branches. If any of
1375 1392 the rules match in order, we return the matching permissions
1376 1393 """
1377 1394
1378 1395 rule = default_perm = ''
1379 1396
1380 1397 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1381 1398 if not repo_branch_perms:
1382 1399 return rule, default_perm
1383 1400
1384 1401 # now calculate the permissions
1385 1402 for pattern, branch_perm in repo_branch_perms.items():
1386 1403 if fnmatch.fnmatch(branch_name, pattern):
1387 1404 rule = '`{}`=>{}'.format(pattern, branch_perm)
1388 1405 return rule, branch_perm
1389 1406
1390 1407 return rule, default_perm
1391 1408
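# --- editor's illustrative sketch (not part of this changeset) ---
# Branch rules are matched in order with fnmatch and the first matching
# pattern wins. The rule table and permission names below are illustrative;
# 'branch.none' is the only value taken directly from this file.
import collections
import fnmatch

branch_rules = collections.OrderedDict([
    ('release/*', 'branch.none'),   # e.g. forbid pushes to release branches
    ('*', 'branch.push'),
])

def first_matching_rule(branch_name, rules):
    for pattern, perm in rules.items():
        if fnmatch.fnmatch(branch_name, pattern):
            return '`{}`=>{}'.format(pattern, perm), perm
    return '', ''

assert first_matching_rule('release/1.0', branch_rules)[1] == 'branch.none'
assert first_matching_rule('feature/x', branch_rules)[1] == 'branch.push'
# --- end of editor's sketch ---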
1392 1409 def __repr__(self):
1393 1410 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1394 1411 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1395 1412
1396 1413 def set_authenticated(self, authenticated=True):
1397 1414 if self.user_id != self.anonymous_user.user_id:
1398 1415 self.is_authenticated = authenticated
1399 1416
1400 1417 def get_cookie_store(self):
1401 1418 return {
1402 1419 'username': self.username,
1403 1420 'password': md5(self.password or ''),
1404 1421 'user_id': self.user_id,
1405 1422 'is_authenticated': self.is_authenticated
1406 1423 }
1407 1424
1408 1425 @classmethod
1409 1426 def from_cookie_store(cls, cookie_store):
1410 1427 """
1411 1428 Creates AuthUser from a cookie store
1412 1429
1413 1430 :param cls:
1414 1431 :param cookie_store:
1415 1432 """
1416 1433 user_id = cookie_store.get('user_id')
1417 1434 username = cookie_store.get('username')
1418 1435 api_key = cookie_store.get('api_key')
1419 1436 return AuthUser(user_id, api_key, username)
1420 1437
1421 1438 @classmethod
1422 1439 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1423 1440 _set = set()
1424 1441
1425 1442 if inherit_from_default:
1426 1443 def_user_id = User.get_default_user(cache=True).user_id
1427 1444 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1428 1445 if cache:
1429 1446 default_ips = default_ips.options(
1430 1447 FromCache("sql_cache_short", "get_user_ips_default"))
1431 1448
1432 1449 # populate from default user
1433 1450 for ip in default_ips:
1434 1451 try:
1435 1452 _set.add(ip.ip_addr)
1436 1453 except ObjectDeletedError:
1437 1454 # since we use heavy caching sometimes it happens that
1438 1455 # we get deleted objects here, we just skip them
1439 1456 pass
1440 1457
1441 1458 # NOTE:(marcink) we don't want to load any rules for empty
1442 1459 # user_id, which is the case for access by non-logged-in users when anonymous
1443 1460 # access is disabled
1444 1461 user_ips = []
1445 1462 if user_id:
1446 1463 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1447 1464 if cache:
1448 1465 user_ips = user_ips.options(
1449 1466 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1450 1467
1451 1468 for ip in user_ips:
1452 1469 try:
1453 1470 _set.add(ip.ip_addr)
1454 1471 except ObjectDeletedError:
1455 1472 # since we use heavy caching sometimes it happens that we get
1456 1473 # deleted objects here, we just skip them
1457 1474 pass
1458 1475 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1459 1476
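# --- editor's illustrative sketch (not part of this changeset) ---
# The fallback above ({'0.0.0.0/0', '::/0'}) means "allow any IPv4/IPv6
# address" when no rules are defined. A standalone approximation of the
# range check, using the stdlib `ipaddress` module (Python 3); the real
# code uses RhodeCode's own check_ip_access helper instead.
import ipaddress

def ip_in_ranges(source_ip, allowed_ranges):
    addr = ipaddress.ip_address(source_ip)
    return any(addr in ipaddress.ip_network(r) for r in allowed_ranges)

assert ip_in_ranges('192.168.1.10', ['0.0.0.0/0', '::/0'])
assert not ip_in_ranges('10.0.0.1', ['192.168.0.0/16'])
# --- end of editor's sketch ---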
1460 1477
1461 1478 def set_available_permissions(settings):
1462 1479 """
1463 1480 This function will populate pyramid settings with all available
1464 1481 permissions defined in the db. We don't want to check the db each time for new
1465 1482 permissions since adding a new permission also requires an application restart,
1466 1483 i.e. to decorate new views with the newly created permission
1467 1484
1468 1485 :param settings: current pyramid registry.settings
1469 1486
1470 1487 """
1471 1488 log.debug('auth: getting information about all available permissions')
1472 1489 try:
1473 1490 sa = meta.Session
1474 1491 all_perms = sa.query(Permission).all()
1475 1492 settings.setdefault('available_permissions',
1476 1493 [x.permission_name for x in all_perms])
1477 1494 log.debug('auth: set available permissions')
1478 1495 except Exception:
1479 1496 log.exception('Failed to fetch permissions from the database.')
1480 1497 raise
1481 1498
1482 1499
1483 1500 def get_csrf_token(session, force_new=False, save_if_missing=True):
1484 1501 """
1485 1502 Return the current CSRF token, creating one if one doesn't
1486 1503 already exist and the save_if_missing flag is present.
1487 1504
1488 1505 :param session: pass in the pyramid session, else we use the global ones
1489 1506 :param force_new: force to re-generate the token and store it in session
1490 1507 :param save_if_missing: save the newly generated token if it's missing in
1491 1508 session
1492 1509 """
1493 1510 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1494 1511 # from pyramid.csrf import get_csrf_token
1495 1512
1496 1513 if (csrf_token_key not in session and save_if_missing) or force_new:
1497 1514 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1498 1515 session[csrf_token_key] = token
1499 1516 if hasattr(session, 'save'):
1500 1517 session.save()
1501 1518 return session.get(csrf_token_key)
1502 1519
1503 1520
1504 1521 def get_request(perm_class_instance):
1505 1522 from pyramid.threadlocal import get_current_request
1506 1523 pyramid_request = get_current_request()
1507 1524 return pyramid_request
1508 1525
1509 1526
1510 1527 # CHECK DECORATORS
1511 1528 class CSRFRequired(object):
1512 1529 """
1513 1530 Decorator for authenticating a form
1514 1531
1515 1532 This decorator uses an authorization token stored in the client's
1516 1533 session for prevention of certain Cross-site request forgery (CSRF)
1517 1534 attacks (See
1518 1535 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1519 1536 information).
1520 1537
1521 1538 For use with the ``webhelpers.secure_form`` helper functions.
1522 1539
1523 1540 """
1524 1541 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1525 1542 except_methods=None):
1526 1543 self.token = token
1527 1544 self.header = header
1528 1545 self.except_methods = except_methods or []
1529 1546
1530 1547 def __call__(self, func):
1531 1548 return get_cython_compat_decorator(self.__wrapper, func)
1532 1549
1533 1550 def _get_csrf(self, _request):
1534 1551 return _request.POST.get(self.token, _request.headers.get(self.header))
1535 1552
1536 1553 def check_csrf(self, _request, cur_token):
1537 1554 supplied_token = self._get_csrf(_request)
1538 1555 return supplied_token and supplied_token == cur_token
1539 1556
1540 1557 def _get_request(self):
1541 1558 return get_request(self)
1542 1559
1543 1560 def __wrapper(self, func, *fargs, **fkwargs):
1544 1561 request = self._get_request()
1545 1562
1546 1563 if request.method in self.except_methods:
1547 1564 return func(*fargs, **fkwargs)
1548 1565
1549 1566 cur_token = get_csrf_token(request.session, save_if_missing=False)
1550 1567 if self.check_csrf(request, cur_token):
1551 1568 if request.POST.get(self.token):
1552 1569 del request.POST[self.token]
1553 1570 return func(*fargs, **fkwargs)
1554 1571 else:
1555 1572 reason = 'token-missing'
1556 1573 supplied_token = self._get_csrf(request)
1557 1574 if supplied_token and cur_token != supplied_token:
1558 1575 reason = 'token-mismatch [%s:%s]' % (
1559 1576 (cur_token or '')[:6], (supplied_token or '')[:6])
1560 1577
1561 1578 csrf_message = \
1562 1579 ("Cross-site request forgery detected, request denied. See "
1563 1580 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1564 1581 "more information.")
1565 1582 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1566 1583 'REMOTE_ADDR:%s, HEADERS:%s' % (
1567 1584 request, reason, request.remote_addr, request.headers))
1568 1585
1569 1586 raise HTTPForbidden(explanation=csrf_message)
1570 1587
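# --- editor's illustrative sketch (not part of this changeset) ---
# _get_csrf() above accepts the token either as a POST field or via the
# X-CSRF-Token header, with the POST field taking precedence. A standalone
# model of that lookup order; the key name 'csrf_token' stands in for
# csrf_token_key and is an assumption here.
def get_supplied_token(post, headers, token='csrf_token', header='X-CSRF-Token'):
    return post.get(token, headers.get(header))

assert get_supplied_token({'csrf_token': 'abc'}, {'X-CSRF-Token': 'xyz'}) == 'abc'
assert get_supplied_token({}, {'X-CSRF-Token': 'xyz'}) == 'xyz'
assert get_supplied_token({}, {}) is None
# --- end of editor's sketch ---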
1571 1588
1572 1589 class LoginRequired(object):
1573 1590 """
1574 1591 Must be logged in to execute this function else
1575 1592 redirect to login page
1576 1593
1577 1594 :param auth_token_access: if enabled this checks only for a valid auth token
1578 1595 and grants access based on a valid token
1579 1596 """
1580 1597 def __init__(self, auth_token_access=None):
1581 1598 self.auth_token_access = auth_token_access
1582 1599
1583 1600 def __call__(self, func):
1584 1601 return get_cython_compat_decorator(self.__wrapper, func)
1585 1602
1586 1603 def _get_request(self):
1587 1604 return get_request(self)
1588 1605
1589 1606 def __wrapper(self, func, *fargs, **fkwargs):
1590 1607 from rhodecode.lib import helpers as h
1591 1608 cls = fargs[0]
1592 1609 user = cls._rhodecode_user
1593 1610 request = self._get_request()
1594 1611 _ = request.translate
1595 1612
1596 1613 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1597 1614 log.debug('Starting login restriction checks for user: %s', user)
1598 1615 # check if our IP is allowed
1599 1616 ip_access_valid = True
1600 1617 if not user.ip_allowed:
1601 1618 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1602 1619 category='warning')
1603 1620 ip_access_valid = False
1604 1621
1605 1622 # check if we used an APIKEY and it's a valid one
1606 1623 # defined white-list of controllers which API access will be enabled
1607 1624 _auth_token = request.GET.get(
1608 1625 'auth_token', '') or request.GET.get('api_key', '')
1609 1626 auth_token_access_valid = allowed_auth_token_access(
1610 1627 loc, auth_token=_auth_token)
1611 1628
1612 1629 # explicit controller is enabled or API is in our whitelist
1613 1630 if self.auth_token_access or auth_token_access_valid:
1614 1631 log.debug('Checking AUTH TOKEN access for %s', cls)
1615 1632 db_user = user.get_instance()
1616 1633
1617 1634 if db_user:
1618 1635 if self.auth_token_access:
1619 1636 roles = self.auth_token_access
1620 1637 else:
1621 1638 roles = [UserApiKeys.ROLE_HTTP]
1622 1639 token_match = db_user.authenticate_by_token(
1623 1640 _auth_token, roles=roles)
1624 1641 else:
1625 1642 log.debug('Unable to fetch db instance for auth user: %s', user)
1626 1643 token_match = False
1627 1644
1628 1645 if _auth_token and token_match:
1629 1646 auth_token_access_valid = True
1630 1647 log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
1631 1648 else:
1632 1649 auth_token_access_valid = False
1633 1650 if not _auth_token:
1634 1651 log.debug("AUTH TOKEN *NOT* present in request")
1635 1652 else:
1636 1653 log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])
1637 1654
1638 1655 log.debug('Checking if %s is authenticated @ %s', user.username, loc)
1639 1656 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1640 1657 else 'AUTH_TOKEN_AUTH'
1641 1658
1642 1659 if ip_access_valid and (
1643 1660 user.is_authenticated or auth_token_access_valid):
1644 1661 log.info('user %s authenticating with:%s IS authenticated on func %s',
1645 1662 user, reason, loc)
1646 1663
1647 1664 return func(*fargs, **fkwargs)
1648 1665 else:
1649 1666 log.warning(
1650 1667 'user %s authenticating with:%s NOT authenticated on '
1651 1668 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
1652 1669 user, reason, loc, ip_access_valid, auth_token_access_valid)
1653 1670 # we preserve the get PARAM
1654 1671 came_from = get_came_from(request)
1655 1672
1656 1673 log.debug('redirecting to login page with %s', came_from)
1657 1674 raise HTTPFound(
1658 1675 h.route_path('login', _query={'came_from': came_from}))
1659 1676
1660 1677
1661 1678 class NotAnonymous(object):
1662 1679 """
1663 1680 Must not be the anonymous (default) user to execute this function, else
1664 1681 redirect to login page
1665 1682 """
1666 1683
1667 1684 def __call__(self, func):
1668 1685 return get_cython_compat_decorator(self.__wrapper, func)
1669 1686
1670 1687 def _get_request(self):
1671 1688 return get_request(self)
1672 1689
1673 1690 def __wrapper(self, func, *fargs, **fkwargs):
1674 1691 import rhodecode.lib.helpers as h
1675 1692 cls = fargs[0]
1676 1693 self.user = cls._rhodecode_user
1677 1694 request = self._get_request()
1678 1695 _ = request.translate
1679 1696 log.debug('Checking if user is not anonymous @%s', cls)
1680 1697
1681 1698 anonymous = self.user.username == User.DEFAULT_USER
1682 1699
1683 1700 if anonymous:
1684 1701 came_from = get_came_from(request)
1685 1702 h.flash(_('You need to be a registered user to '
1686 1703 'perform this action'),
1687 1704 category='warning')
1688 1705 raise HTTPFound(
1689 1706 h.route_path('login', _query={'came_from': came_from}))
1690 1707 else:
1691 1708 return func(*fargs, **fkwargs)
1692 1709
1693 1710
1694 1711 class PermsDecorator(object):
1695 1712 """
1696 1713 Base class for controller decorators, we extract the current user from
1697 1714 the class itself, which has it stored in base controllers
1698 1715 """
1699 1716
1700 1717 def __init__(self, *required_perms):
1701 1718 self.required_perms = set(required_perms)
1702 1719
1703 1720 def __call__(self, func):
1704 1721 return get_cython_compat_decorator(self.__wrapper, func)
1705 1722
1706 1723 def _get_request(self):
1707 1724 return get_request(self)
1708 1725
1709 1726 def __wrapper(self, func, *fargs, **fkwargs):
1710 1727 import rhodecode.lib.helpers as h
1711 1728 cls = fargs[0]
1712 1729 _user = cls._rhodecode_user
1713 1730 request = self._get_request()
1714 1731 _ = request.translate
1715 1732
1716 1733 log.debug('checking %s permissions %s for %s %s',
1717 1734 self.__class__.__name__, self.required_perms, cls, _user)
1718 1735
1719 1736 if self.check_permissions(_user):
1720 1737 log.debug('Permission granted for %s %s', cls, _user)
1721 1738 return func(*fargs, **fkwargs)
1722 1739
1723 1740 else:
1724 1741 log.debug('Permission denied for %s %s', cls, _user)
1725 1742 anonymous = _user.username == User.DEFAULT_USER
1726 1743
1727 1744 if anonymous:
1728 1745 came_from = get_came_from(self._get_request())
1729 1746 h.flash(_('You need to be signed in to view this page'),
1730 1747 category='warning')
1731 1748 raise HTTPFound(
1732 1749 h.route_path('login', _query={'came_from': came_from}))
1733 1750
1734 1751 else:
1735 1752 # redirect with 404 to prevent resource discovery
1736 1753 raise HTTPNotFound()
1737 1754
1738 1755 def check_permissions(self, user):
1739 1756 """Dummy function for overriding"""
1740 1757 raise NotImplementedError(
1741 1758 'You have to write this function in child class')
1742 1759
1743 1760
1744 1761 class HasPermissionAllDecorator(PermsDecorator):
1745 1762 """
1746 1763 Checks for access permission for all given predicates. All of them
1747 1764 have to be met in order to fulfill the request
1748 1765 """
1749 1766
1750 1767 def check_permissions(self, user):
1751 1768 perms = user.permissions_with_scope({})
1752 1769 if self.required_perms.issubset(perms['global']):
1753 1770 return True
1754 1771 return False
1755 1772
1756 1773
1757 1774 class HasPermissionAnyDecorator(PermsDecorator):
1758 1775 """
1759 1776 Checks for access permission for any of given predicates. In order to
1760 1777 fulfill the request any of the predicates must be met
1761 1778 """
1762 1779
1763 1780 def check_permissions(self, user):
1764 1781 perms = user.permissions_with_scope({})
1765 1782 if self.required_perms.intersection(perms['global']):
1766 1783 return True
1767 1784 return False
1768 1785
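# --- editor's illustrative sketch (not part of this changeset) ---
# The *All* decorators use set.issubset (every required permission must be
# granted) while the *Any* decorators use set.intersection (a single match
# is enough). A quick standalone comparison using global permission names
# that appear in this file:
granted = {'hg.admin', 'hg.create.repository'}
required = {'hg.admin', 'hg.fork.repository'}

assert not required.issubset(granted)   # "All" check fails: fork perm missing
assert required.intersection(granted)   # "Any" check passes via hg.admin
# --- end of editor's sketch ---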
1769 1786
1770 1787 class HasRepoPermissionAllDecorator(PermsDecorator):
1771 1788 """
1772 1789 Checks for access permission for all given predicates for specific
1773 1790 repository. All of them have to be met in order to fulfill the request
1774 1791 """
1775 1792 def _get_repo_name(self):
1776 1793 _request = self._get_request()
1777 1794 return get_repo_slug(_request)
1778 1795
1779 1796 def check_permissions(self, user):
1780 1797 perms = user.permissions
1781 1798 repo_name = self._get_repo_name()
1782 1799
1783 1800 try:
1784 1801 user_perms = {perms['repositories'][repo_name]}
1785 1802 except KeyError:
1786 1803 log.debug('cannot locate repo with name: `%s` in permissions defs',
1787 1804 repo_name)
1788 1805 return False
1789 1806
1790 1807 log.debug('checking `%s` permissions for repo `%s`',
1791 1808 user_perms, repo_name)
1792 1809 if self.required_perms.issubset(user_perms):
1793 1810 return True
1794 1811 return False
1795 1812
1796 1813
1797 1814 class HasRepoPermissionAnyDecorator(PermsDecorator):
1798 1815 """
1799 1816 Checks for access permission for any of given predicates for specific
1800 1817 repository. In order to fulfill the request any of the predicates must be met
1801 1818 """
1802 1819 def _get_repo_name(self):
1803 1820 _request = self._get_request()
1804 1821 return get_repo_slug(_request)
1805 1822
1806 1823 def check_permissions(self, user):
1807 1824 perms = user.permissions
1808 1825 repo_name = self._get_repo_name()
1809 1826
1810 1827 try:
1811 1828 user_perms = {perms['repositories'][repo_name]}
1812 1829 except KeyError:
1813 1830 log.debug(
1814 1831 'cannot locate repo with name: `%s` in permissions defs',
1815 1832 repo_name)
1816 1833 return False
1817 1834
1818 1835 log.debug('checking `%s` permissions for repo `%s`',
1819 1836 user_perms, repo_name)
1820 1837 if self.required_perms.intersection(user_perms):
1821 1838 return True
1822 1839 return False
1823 1840
1824 1841
1825 1842 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1826 1843 """
1827 1844 Checks for access permission for all given predicates for specific
1828 1845 repository group. All of them have to be met in order to
1829 1846 fulfill the request
1830 1847 """
1831 1848 def _get_repo_group_name(self):
1832 1849 _request = self._get_request()
1833 1850 return get_repo_group_slug(_request)
1834 1851
1835 1852 def check_permissions(self, user):
1836 1853 perms = user.permissions
1837 1854 group_name = self._get_repo_group_name()
1838 1855 try:
1839 1856 user_perms = {perms['repositories_groups'][group_name]}
1840 1857 except KeyError:
1841 1858 log.debug(
1842 1859 'cannot locate repo group with name: `%s` in permissions defs',
1843 1860 group_name)
1844 1861 return False
1845 1862
1846 1863 log.debug('checking `%s` permissions for repo group `%s`',
1847 1864 user_perms, group_name)
1848 1865 if self.required_perms.issubset(user_perms):
1849 1866 return True
1850 1867 return False
1851 1868
1852 1869
1853 1870 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1854 1871 """
1855 1872 Checks for access permission for any of given predicates for specific
1856 1873 repository group. In order to fulfill the request, any
1857 1874 of the predicates must be met
1858 1875 """
1859 1876 def _get_repo_group_name(self):
1860 1877 _request = self._get_request()
1861 1878 return get_repo_group_slug(_request)
1862 1879
1863 1880 def check_permissions(self, user):
1864 1881 perms = user.permissions
1865 1882 group_name = self._get_repo_group_name()
1866 1883
1867 1884 try:
1868 1885 user_perms = {perms['repositories_groups'][group_name]}
1869 1886 except KeyError:
1870 1887 log.debug(
1871 1888 'cannot locate repo group with name: `%s` in permissions defs',
1872 1889 group_name)
1873 1890 return False
1874 1891
1875 1892 log.debug('checking `%s` permissions for repo group `%s`',
1876 1893 user_perms, group_name)
1877 1894 if self.required_perms.intersection(user_perms):
1878 1895 return True
1879 1896 return False
1880 1897
1881 1898
1882 1899 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1883 1900 """
1884 1901 Checks for access permission for all given predicates for specific
1885 1902 user group. All of them have to be met in order to fulfill the request
1886 1903 """
1887 1904 def _get_user_group_name(self):
1888 1905 _request = self._get_request()
1889 1906 return get_user_group_slug(_request)
1890 1907
1891 1908 def check_permissions(self, user):
1892 1909 perms = user.permissions
1893 1910 group_name = self._get_user_group_name()
1894 1911 try:
1895 1912 user_perms = {perms['user_groups'][group_name]}
1896 1913 except KeyError:
1897 1914 return False
1898 1915
1899 1916 if self.required_perms.issubset(user_perms):
1900 1917 return True
1901 1918 return False
1902 1919
1903 1920
1904 1921 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1905 1922 """
1906 1923 Checks for access permission for any of given predicates for specific
1907 1924 user group. In order to fulfill the request, any of the predicates must be met
1908 1925 """
1909 1926 def _get_user_group_name(self):
1910 1927 _request = self._get_request()
1911 1928 return get_user_group_slug(_request)
1912 1929
1913 1930 def check_permissions(self, user):
1914 1931 perms = user.permissions
1915 1932 group_name = self._get_user_group_name()
1916 1933 try:
1917 1934 user_perms = {perms['user_groups'][group_name]}
1918 1935 except KeyError:
1919 1936 return False
1920 1937
1921 1938 if self.required_perms.intersection(user_perms):
1922 1939 return True
1923 1940 return False
1924 1941
1925 1942
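A minimal usage sketch of the decorators above on a view method; the view class and method names are hypothetical, but the permission keys follow the 'repository.read/write/admin' convention used in the checks. The wrapper reads the authenticated user from the decorated view instance (fargs[0]._rhodecode_user), so in real code these decorators sit on methods of the application view classes:

    # hypothetical view class; real view base classes provide
    # self._rhodecode_user and the request used by the wrapper above
    class RepoFilesView(object):

        @HasRepoPermissionAnyDecorator(
            'repository.read', 'repository.write', 'repository.admin')
        def repo_files(self):
            # reached only when the current user holds at least one of the
            # listed permissions on the repository resolved from the request
            pass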
1926 1943 # CHECK FUNCTIONS
1927 1944 class PermsFunction(object):
1928 1945 """Base function for other check functions"""
1929 1946
1930 1947 def __init__(self, *perms):
1931 1948 self.required_perms = set(perms)
1932 1949 self.repo_name = None
1933 1950 self.repo_group_name = None
1934 1951 self.user_group_name = None
1935 1952
1936 1953 def __bool__(self):
1937 1954 frame = inspect.currentframe()
1938 1955 stack_trace = traceback.format_stack(frame)
1939 1956 log.error('Checking bool value on a class instance of perm '
1940 1957 'function is not allowed: %s', ''.join(stack_trace))
1941 1958 # rather than raising an error, we always return False here, so that if
1942 1959 # someone accidentally checks the truth value of a bare instance it
1943 1960 # always ends up evaluating to False
1944 1961 return False
1945 1962 __nonzero__ = __bool__
1946 1963
1947 1964 def __call__(self, check_location='', user=None):
1948 1965 if not user:
1949 1966 log.debug('Using user attribute from global request')
1950 1967 request = self._get_request()
1951 1968 user = request.user
1952 1969
1953 1970 # init auth user if not already given
1954 1971 if not isinstance(user, AuthUser):
1955 1972 log.debug('Wrapping user %s into AuthUser', user)
1956 1973 user = AuthUser(user.user_id)
1957 1974
1958 1975 cls_name = self.__class__.__name__
1959 1976 check_scope = self._get_check_scope(cls_name)
1960 1977 check_location = check_location or 'unspecified location'
1961 1978
1962 1979 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1963 1980 self.required_perms, user, check_scope, check_location)
1964 1981 if not user:
1965 1982 log.warning('Empty user given for permission check')
1966 1983 return False
1967 1984
1968 1985 if self.check_permissions(user):
1969 1986 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1970 1987 check_scope, user, check_location)
1971 1988 return True
1972 1989
1973 1990 else:
1974 1991 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1975 1992 check_scope, user, check_location)
1976 1993 return False
1977 1994
1978 1995 def _get_request(self):
1979 1996 return get_request(self)
1980 1997
1981 1998 def _get_check_scope(self, cls_name):
1982 1999 return {
1983 2000 'HasPermissionAll': 'GLOBAL',
1984 2001 'HasPermissionAny': 'GLOBAL',
1985 2002 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1986 2003 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1987 2004 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1988 2005 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1989 2006 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1990 2007 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1991 2008 }.get(cls_name, '?:%s' % cls_name)
1992 2009
1993 2010 def check_permissions(self, user):
1994 2011 """Dummy function for overriding"""
1995 2012 raise Exception('You have to write this function in child class')
1996 2013
1997 2014
1998 2015 class HasPermissionAll(PermsFunction):
1999 2016 def check_permissions(self, user):
2000 2017 perms = user.permissions_with_scope({})
2001 2018 if self.required_perms.issubset(perms.get('global')):
2002 2019 return True
2003 2020 return False
2004 2021
2005 2022
2006 2023 class HasPermissionAny(PermsFunction):
2007 2024 def check_permissions(self, user):
2008 2025 perms = user.permissions_with_scope({})
2009 2026 if self.required_perms.intersection(perms.get('global')):
2010 2027 return True
2011 2028 return False
2012 2029
2013 2030
2014 2031 class HasRepoPermissionAll(PermsFunction):
2015 2032 def __call__(self, repo_name=None, check_location='', user=None):
2016 2033 self.repo_name = repo_name
2017 2034 return super(HasRepoPermissionAll, self).__call__(check_location, user)
2018 2035
2019 2036 def _get_repo_name(self):
2020 2037 if not self.repo_name:
2021 2038 _request = self._get_request()
2022 2039 self.repo_name = get_repo_slug(_request)
2023 2040 return self.repo_name
2024 2041
2025 2042 def check_permissions(self, user):
2026 2043 self.repo_name = self._get_repo_name()
2027 2044 perms = user.permissions
2028 2045 try:
2029 2046 user_perms = {perms['repositories'][self.repo_name]}
2030 2047 except KeyError:
2031 2048 return False
2032 2049 if self.required_perms.issubset(user_perms):
2033 2050 return True
2034 2051 return False
2035 2052
2036 2053
2037 2054 class HasRepoPermissionAny(PermsFunction):
2038 2055 def __call__(self, repo_name=None, check_location='', user=None):
2039 2056 self.repo_name = repo_name
2040 2057 return super(HasRepoPermissionAny, self).__call__(check_location, user)
2041 2058
2042 2059 def _get_repo_name(self):
2043 2060 if not self.repo_name:
2044 2061 _request = self._get_request()
2045 2062 self.repo_name = get_repo_slug(_request)
2046 2063 return self.repo_name
2047 2064
2048 2065 def check_permissions(self, user):
2049 2066 self.repo_name = self._get_repo_name()
2050 2067 perms = user.permissions
2051 2068 try:
2052 2069 user_perms = {perms['repositories'][self.repo_name]}
2053 2070 except KeyError:
2054 2071 return False
2055 2072 if self.required_perms.intersection(user_perms):
2056 2073 return True
2057 2074 return False
2058 2075
2059 2076
2060 2077 class HasRepoGroupPermissionAny(PermsFunction):
2061 2078 def __call__(self, group_name=None, check_location='', user=None):
2062 2079 self.repo_group_name = group_name
2063 2080 return super(HasRepoGroupPermissionAny, self).__call__(
2064 2081 check_location, user)
2065 2082
2066 2083 def check_permissions(self, user):
2067 2084 perms = user.permissions
2068 2085 try:
2069 2086 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2070 2087 except KeyError:
2071 2088 return False
2072 2089 if self.required_perms.intersection(user_perms):
2073 2090 return True
2074 2091 return False
2075 2092
2076 2093
2077 2094 class HasRepoGroupPermissionAll(PermsFunction):
2078 2095 def __call__(self, group_name=None, check_location='', user=None):
2079 2096 self.repo_group_name = group_name
2080 2097 return super(HasRepoGroupPermissionAll, self).__call__(
2081 2098 check_location, user)
2082 2099
2083 2100 def check_permissions(self, user):
2084 2101 perms = user.permissions
2085 2102 try:
2086 2103 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2087 2104 except KeyError:
2088 2105 return False
2089 2106 if self.required_perms.issubset(user_perms):
2090 2107 return True
2091 2108 return False
2092 2109
2093 2110
2094 2111 class HasUserGroupPermissionAny(PermsFunction):
2095 2112 def __call__(self, user_group_name=None, check_location='', user=None):
2096 2113 self.user_group_name = user_group_name
2097 2114 return super(HasUserGroupPermissionAny, self).__call__(
2098 2115 check_location, user)
2099 2116
2100 2117 def check_permissions(self, user):
2101 2118 perms = user.permissions
2102 2119 try:
2103 2120 user_perms = {perms['user_groups'][self.user_group_name]}
2104 2121 except KeyError:
2105 2122 return False
2106 2123 if self.required_perms.intersection(user_perms):
2107 2124 return True
2108 2125 return False
2109 2126
2110 2127
2111 2128 class HasUserGroupPermissionAll(PermsFunction):
2112 2129 def __call__(self, user_group_name=None, check_location='', user=None):
2113 2130 self.user_group_name = user_group_name
2114 2131 return super(HasUserGroupPermissionAll, self).__call__(
2115 2132 check_location, user)
2116 2133
2117 2134 def check_permissions(self, user):
2118 2135 perms = user.permissions
2119 2136 try:
2120 2137 user_perms = {perms['user_groups'][self.user_group_name]}
2121 2138 except KeyError:
2122 2139 return False
2123 2140 if self.required_perms.issubset(user_perms):
2124 2141 return True
2125 2142 return False
2126 2143
2127 2144
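The check functions above are meant to be called, not truth-tested; a short sketch with placeholder names (the repository name and user id are made up), including the mistake that the __bool__ override guards against:

    auth_user = AuthUser(user_id=1)  # placeholder user id

    can_read = HasRepoPermissionAny(
        'repository.read', 'repository.write', 'repository.admin')(
            repo_name='some-group/some-repo',   # placeholder repository
            check_location='example check',
            user=auth_user)
    if can_read:
        pass  # proceed with the read-only operation

    # anti-pattern: truth-testing a bare instance always yields False and
    # logs an error, as implemented in PermsFunction.__bool__ above
    if HasRepoPermissionAny('repository.read'):
        pass  # never reached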
2128 2145 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2129 2146 class HasPermissionAnyMiddleware(object):
2130 2147 def __init__(self, *perms):
2131 2148 self.required_perms = set(perms)
2132 2149
2133 2150 def __call__(self, auth_user, repo_name):
2134 2151 # repo_name MUST be unicode, since we handle keys in permission
2135 2152 # dict by unicode
2136 2153 repo_name = safe_unicode(repo_name)
2137 2154 log.debug(
2138 2155 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2139 2156 self.required_perms, auth_user, repo_name)
2140 2157
2141 2158 if self.check_permissions(auth_user, repo_name):
2142 2159 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2143 2160 repo_name, auth_user, 'PermissionMiddleware')
2144 2161 return True
2145 2162
2146 2163 else:
2147 2164 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2148 2165 repo_name, auth_user, 'PermissionMiddleware')
2149 2166 return False
2150 2167
2151 2168 def check_permissions(self, user, repo_name):
2152 2169 perms = user.permissions_with_scope({'repo_name': repo_name})
2153 2170
2154 2171 try:
2155 2172 user_perms = {perms['repositories'][repo_name]}
2156 2173 except Exception:
2157 2174 log.exception('Error while accessing user permissions')
2158 2175 return False
2159 2176
2160 2177 if self.required_perms.intersection(user_perms):
2161 2178 return True
2162 2179 return False
2163 2180
2164 2181
2165 2182 # SPECIAL VERSION TO HANDLE API AUTH
2166 2183 class _BaseApiPerm(object):
2167 2184 def __init__(self, *perms):
2168 2185 self.required_perms = set(perms)
2169 2186
2170 2187 def __call__(self, check_location=None, user=None, repo_name=None,
2171 2188 group_name=None, user_group_name=None):
2172 2189 cls_name = self.__class__.__name__
2173 2190 check_scope = 'global:%s' % (self.required_perms,)
2174 2191 if repo_name:
2175 2192 check_scope += ', repo_name:%s' % (repo_name,)
2176 2193
2177 2194 if group_name:
2178 2195 check_scope += ', repo_group_name:%s' % (group_name,)
2179 2196
2180 2197 if user_group_name:
2181 2198 check_scope += ', user_group_name:%s' % (user_group_name,)
2182 2199
2183 2200 log.debug('checking cls:%s %s %s @ %s',
2184 2201 cls_name, self.required_perms, check_scope, check_location)
2185 2202 if not user:
2186 2203 log.debug('Empty User passed into arguments')
2187 2204 return False
2188 2205
2189 2206 # process user
2190 2207 if not isinstance(user, AuthUser):
2191 2208 user = AuthUser(user.user_id)
2192 2209 if not check_location:
2193 2210 check_location = 'unspecified'
2194 2211 if self.check_permissions(user.permissions, repo_name, group_name,
2195 2212 user_group_name):
2196 2213 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2197 2214 check_scope, user, check_location)
2198 2215 return True
2199 2216
2200 2217 else:
2201 2218 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2202 2219 check_scope, user, check_location)
2203 2220 return False
2204 2221
2205 2222 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2206 2223 user_group_name=None):
2207 2224 """
2208 2225 implement in a child class; should return True if permissions are ok,
2209 2226 False otherwise
2210 2227
2211 2228 :param perm_defs: dict with permission definitions
2212 2229 :param repo_name: repo name
2213 2230 """
2214 2231 raise NotImplementedError()
2215 2232
2216 2233
2217 2234 class HasPermissionAllApi(_BaseApiPerm):
2218 2235 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2219 2236 user_group_name=None):
2220 2237 if self.required_perms.issubset(perm_defs.get('global')):
2221 2238 return True
2222 2239 return False
2223 2240
2224 2241
2225 2242 class HasPermissionAnyApi(_BaseApiPerm):
2226 2243 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2227 2244 user_group_name=None):
2228 2245 if self.required_perms.intersection(perm_defs.get('global')):
2229 2246 return True
2230 2247 return False
2231 2248
2232 2249
2233 2250 class HasRepoPermissionAllApi(_BaseApiPerm):
2234 2251 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2235 2252 user_group_name=None):
2236 2253 try:
2237 2254 _user_perms = {perm_defs['repositories'][repo_name]}
2238 2255 except KeyError:
2239 2256 log.warning(traceback.format_exc())
2240 2257 return False
2241 2258 if self.required_perms.issubset(_user_perms):
2242 2259 return True
2243 2260 return False
2244 2261
2245 2262
2246 2263 class HasRepoPermissionAnyApi(_BaseApiPerm):
2247 2264 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2248 2265 user_group_name=None):
2249 2266 try:
2250 2267 _user_perms = {perm_defs['repositories'][repo_name]}
2251 2268 except KeyError:
2252 2269 log.warning(traceback.format_exc())
2253 2270 return False
2254 2271 if self.required_perms.intersection(_user_perms):
2255 2272 return True
2256 2273 return False
2257 2274
2258 2275
2259 2276 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2260 2277 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2261 2278 user_group_name=None):
2262 2279 try:
2263 2280 _user_perms = {perm_defs['repositories_groups'][group_name]}
2264 2281 except KeyError:
2265 2282 log.warning(traceback.format_exc())
2266 2283 return False
2267 2284 if self.required_perms.intersection(_user_perms):
2268 2285 return True
2269 2286 return False
2270 2287
2271 2288
2272 2289 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2273 2290 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2274 2291 user_group_name=None):
2275 2292 try:
2276 2293 _user_perms = {perm_defs['repositories_groups'][group_name]}
2277 2294 except KeyError:
2278 2295 log.warning(traceback.format_exc())
2279 2296 return False
2280 2297 if self.required_perms.issubset(_user_perms):
2281 2298 return True
2282 2299 return False
2283 2300
2284 2301
2285 2302 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2286 2303 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2287 2304 user_group_name=None):
2288 2305 try:
2289 2306 _user_perms = {perm_defs['user_groups'][user_group_name]}
2290 2307 except KeyError:
2291 2308 log.warning(traceback.format_exc())
2292 2309 return False
2293 2310 if self.required_perms.intersection(_user_perms):
2294 2311 return True
2295 2312 return False
2296 2313
2297 2314
2298 2315 def check_ip_access(source_ip, allowed_ips=None):
2299 2316 """
2300 2317 Checks if source_ip is a subnet of any of allowed_ips.
2301 2318
2302 2319 :param source_ip:
2303 2320 :param allowed_ips: list of allowed ips together with mask
2304 2321 """
2305 2322 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2306 2323 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
2307 2324 if isinstance(allowed_ips, (tuple, list, set)):
2308 2325 for ip in allowed_ips:
2309 2326 ip = safe_unicode(ip)
2310 2327 try:
2311 2328 network_address = ipaddress.ip_network(ip, strict=False)
2312 2329 if source_ip_address in network_address:
2313 2330 log.debug('IP %s is network %s', source_ip_address, network_address)
2314 2331 return True
2315 2332 # if we cannot determine the IP for any reason, don't crash; just
2316 2333 # skip it and log an error, since we still want to answer forbidden
2317 2334 # when a bad IP is sent
2318 2335 except Exception:
2319 2336 log.error(traceback.format_exc())
2320 2337 continue
2321 2338 return False
2322 2339
2323 2340
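A short behavioural sketch of check_ip_access; the addresses are arbitrary examples, and single addresses work as well as networks because ip_network(..., strict=False) accepts both forms:

    assert check_ip_access('192.168.1.7', ['192.168.1.0/24']) is True
    assert check_ip_access('10.0.0.5', ['192.168.1.0/24', '127.0.0.1']) is False
    assert check_ip_access('127.0.0.1', ['127.0.0.1']) is True
    # anything that is not a tuple/list/set of allowed entries denies access
    assert check_ip_access('192.168.1.7', None) is False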
2324 2341 def get_cython_compat_decorator(wrapper, func):
2325 2342 """
2326 2343 Creates a cython compatible decorator. The previously used
2327 2344 decorator.decorator() function seems to be incompatible with cython.
2328 2345
2329 2346 :param wrapper: __wrapper method of the decorator class
2330 2347 :param func: decorated function
2331 2348 """
2332 2349 @wraps(func)
2333 2350 def local_wrapper(*args, **kwds):
2334 2351 return wrapper(func, *args, **kwds)
2335 2352 local_wrapper.__wrapped__ = func
2336 2353 return local_wrapper
2337 2354
2338 2355
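Used directly, the helper behaves like a hand-written closure-based decorator; a minimal sketch with illustrative names, showing that the functools.wraps metadata survives:

    def audit(func, *args, **kwargs):
        # example wrapper with the (func, *args, **kwargs) signature expected above
        log.debug('calling %s', func.__name__)
        return func(*args, **kwargs)

    def audited(func):
        return get_cython_compat_decorator(audit, func)

    @audited
    def compute(x):
        return x + 1

    assert compute(1) == 2
    assert compute.__name__ == 'compute'      # preserved by functools.wraps
    assert compute.__wrapped__ is not None    # set explicitly by the helper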
@@ -1,4706 +1,4712 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode.translation import _
55 55 from rhodecode.lib.vcs import get_vcs_instance
56 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 60 glob2re, StrictAttributeDict, cleaned_uri)
61 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 62 JsonRaw
63 63 from rhodecode.lib.ext_json import json
64 64 from rhodecode.lib.caching_query import FromCache
65 65 from rhodecode.lib.encrypt import AESCipher
66 66
67 67 from rhodecode.model.meta import Base, Session
68 68
69 69 URL_SEP = '/'
70 70 log = logging.getLogger(__name__)
71 71
72 72 # =============================================================================
73 73 # BASE CLASSES
74 74 # =============================================================================
75 75
76 76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 77 # beaker.session.secret if first is not set.
78 78 # and initialized at environment.py
79 79 ENCRYPTION_KEY = None
80 80
81 81 # used to sort permissions by types, '#' used here is not allowed to be in
82 82 # usernames, and it's very early in sorted string.printable table.
83 83 PERMISSION_TYPE_SORT = {
84 84 'admin': '####',
85 85 'write': '###',
86 86 'read': '##',
87 87 'none': '#',
88 88 }
89 89
90 90
91 91 def display_user_sort(obj):
92 92 """
93 93 Sort function used to sort permissions in .permissions() function of
94 94 Repository, RepoGroup, UserGroup. Also it puts the default user in front
95 95 of all other resources
96 96 """
97 97
98 98 if obj.username == User.DEFAULT_USER:
99 99 return '#####'
100 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 101 return prefix + obj.username
102 102
103 103
104 104 def display_user_group_sort(obj):
105 105 """
106 106 Sort function used to sort permissions in .permissions() function of
107 107 Repository, RepoGroup, UserGroup. Also it puts the default user in front
108 108 of all other resources
109 109 """
110 110
111 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 112 return prefix + obj.users_group_name
113 113
114 114
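A small sketch of the resulting order (objects and values are made up); because '#' sorts before any letter, a longer prefix sorts earlier, so ascending order puts the default user first, then admins, writers, readers and finally 'none' holders, alphabetically within each group:

    # perm_rows: objects with .username and .permission, e.g. as returned by
    # Repository.permissions(); the values are purely illustrative
    ordered = sorted(perm_rows, key=display_user_sort)
    # e.g. default, admin-anna, admin-zoe, writer-bob, reader-carl, none-dave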
115 115 def _hash_key(k):
116 116 return sha1_safe(k)
117 117
118 118
119 119 def in_filter_generator(qry, items, limit=500):
120 120 """
121 121 Splits IN() into multiple with OR
122 122 e.g.::
123 123 cnt = Repository.query().filter(
124 124 or_(
125 125 *in_filter_generator(Repository.repo_id, range(100000))
126 126 )).count()
127 127 """
128 128 if not items:
129 129 # an empty list would produce an empty query, which might cause security
130 130 # issues; this can lead to hidden, unpleasant results
131 131 items = [-1]
132 132
133 133 parts = []
134 134 for chunk in xrange(0, len(items), limit):
135 135 parts.append(
136 136 qry.in_(items[chunk: chunk + limit])
137 137 )
138 138
139 139 return parts
140 140
141 141
142 142 base_table_args = {
143 143 'extend_existing': True,
144 144 'mysql_engine': 'InnoDB',
145 145 'mysql_charset': 'utf8',
146 146 'sqlite_autoincrement': True
147 147 }
148 148
149 149
150 150 class EncryptedTextValue(TypeDecorator):
151 151 """
152 152 Special column for encrypted long text data, use like::
153 153
154 154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
155 155
156 156 This column is intelligent: if the value is in unencrypted form it returns
157 157 the unencrypted form, but on save it always encrypts
158 158 """
159 159 impl = Text
160 160
161 161 def process_bind_param(self, value, dialect):
162 162 if not value:
163 163 return value
164 164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 165 # protect against double encryption if someone manually starts
166 166 # doing it
167 167 raise ValueError('value needs to be in unencrypted format, ie. '
168 168 'not starting with enc$aes')
169 169 return 'enc$aes_hmac$%s' % AESCipher(
170 170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171 171
172 172 def process_result_value(self, value, dialect):
173 173 import rhodecode
174 174
175 175 if not value:
176 176 return value
177 177
178 178 parts = value.split('$', 3)
179 179 if not len(parts) == 3:
180 180 # probably not encrypted values
181 181 return value
182 182 else:
183 183 if parts[0] != 'enc':
184 184 # parts ok but without our header ?
185 185 return value
186 186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 187 'rhodecode.encrypted_values.strict') or True)
188 188 # at that stage we know it's our encryption
189 189 if parts[1] == 'aes':
190 190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 191 elif parts[1] == 'aes_hmac':
192 192 decrypted_data = AESCipher(
193 193 ENCRYPTION_KEY, hmac=True,
194 194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 195 else:
196 196 raise ValueError(
197 197 'Encryption type part is wrong, must be `aes` '
198 198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 199 return decrypted_data
200 200
201 201
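A hedged sketch of how such a column behaves in practice; the model below is hypothetical and the stored ciphertext depends on ENCRYPTION_KEY being configured:

    class ExampleSecret(Base, BaseModel):        # hypothetical model
        __tablename__ = 'example_secrets'
        secret_id = Column(Integer(), primary_key=True)
        secret = Column("secret", EncryptedTextValue(), nullable=True)

    obj = ExampleSecret()
    obj.secret = 'plain-text-value'
    # on flush, process_bind_param stores 'enc$aes_hmac$<ciphertext>'
    # on load, process_result_value transparently returns the plain text
    # persisting a value that already starts with 'enc$aes' raises ValueError,
    # which is the double-encryption guard shown above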
202 202 class BaseModel(object):
203 203 """
204 204 Base Model for all classes
205 205 """
206 206
207 207 @classmethod
208 208 def _get_keys(cls):
209 209 """return column names for this model """
210 210 return class_mapper(cls).c.keys()
211 211
212 212 def get_dict(self):
213 213 """
214 214 return dict with keys and values corresponding
215 215 to this model data """
216 216
217 217 d = {}
218 218 for k in self._get_keys():
219 219 d[k] = getattr(self, k)
220 220
221 221 # also use __json__() if present to get additional fields
222 222 _json_attr = getattr(self, '__json__', None)
223 223 if _json_attr:
224 224 # update with attributes from __json__
225 225 if callable(_json_attr):
226 226 _json_attr = _json_attr()
227 227 for k, val in _json_attr.iteritems():
228 228 d[k] = val
229 229 return d
230 230
231 231 def get_appstruct(self):
232 232 """return list with keys and values tuples corresponding
233 233 to this model data """
234 234
235 235 lst = []
236 236 for k in self._get_keys():
237 237 lst.append((k, getattr(self, k),))
238 238 return lst
239 239
240 240 def populate_obj(self, populate_dict):
241 241 """populate model with data from given populate_dict"""
242 242
243 243 for k in self._get_keys():
244 244 if k in populate_dict:
245 245 setattr(self, k, populate_dict[k])
246 246
247 247 @classmethod
248 248 def query(cls):
249 249 return Session().query(cls)
250 250
251 251 @classmethod
252 252 def get(cls, id_):
253 253 if id_:
254 254 return cls.query().get(id_)
255 255
256 256 @classmethod
257 257 def get_or_404(cls, id_):
258 258 from pyramid.httpexceptions import HTTPNotFound
259 259
260 260 try:
261 261 id_ = int(id_)
262 262 except (TypeError, ValueError):
263 263 raise HTTPNotFound()
264 264
265 265 res = cls.query().get(id_)
266 266 if not res:
267 267 raise HTTPNotFound()
268 268 return res
269 269
270 270 @classmethod
271 271 def getAll(cls):
272 272 # deprecated and left for backward compatibility
273 273 return cls.get_all()
274 274
275 275 @classmethod
276 276 def get_all(cls):
277 277 return cls.query().all()
278 278
279 279 @classmethod
280 280 def delete(cls, id_):
281 281 obj = cls.query().get(id_)
282 282 Session().delete(obj)
283 283
284 284 @classmethod
285 285 def identity_cache(cls, session, attr_name, value):
286 286 exist_in_session = []
287 287 for (item_cls, pkey), instance in session.identity_map.items():
288 288 if cls == item_cls and getattr(instance, attr_name) == value:
289 289 exist_in_session.append(instance)
290 290 if exist_in_session:
291 291 if len(exist_in_session) == 1:
292 292 return exist_in_session[0]
293 293 log.exception(
294 294 'multiple objects with attr %s and '
295 295 'value %s found with same name: %r',
296 296 attr_name, value, exist_in_session)
297 297
298 298 def __repr__(self):
299 299 if hasattr(self, '__unicode__'):
300 300 # python repr needs to return str
301 301 try:
302 302 return safe_str(self.__unicode__())
303 303 except UnicodeDecodeError:
304 304 pass
305 305 return '<DB:%s>' % (self.__class__.__name__)
306 306
307 307
308 308 class RhodeCodeSetting(Base, BaseModel):
309 309 __tablename__ = 'rhodecode_settings'
310 310 __table_args__ = (
311 311 UniqueConstraint('app_settings_name'),
312 312 base_table_args
313 313 )
314 314
315 315 SETTINGS_TYPES = {
316 316 'str': safe_str,
317 317 'int': safe_int,
318 318 'unicode': safe_unicode,
319 319 'bool': str2bool,
320 320 'list': functools.partial(aslist, sep=',')
321 321 }
322 322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 323 GLOBAL_CONF_KEY = 'app_settings'
324 324
325 325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329 329
330 330 def __init__(self, key='', val='', type='unicode'):
331 331 self.app_settings_name = key
332 332 self.app_settings_type = type
333 333 self.app_settings_value = val
334 334
335 335 @validates('_app_settings_value')
336 336 def validate_settings_value(self, key, val):
337 337 assert type(val) == unicode
338 338 return val
339 339
340 340 @hybrid_property
341 341 def app_settings_value(self):
342 342 v = self._app_settings_value
343 343 _type = self.app_settings_type
344 344 if _type:
345 345 _type = self.app_settings_type.split('.')[0]
346 346 # decode the encrypted value
347 347 if 'encrypted' in self.app_settings_type:
348 348 cipher = EncryptedTextValue()
349 349 v = safe_unicode(cipher.process_result_value(v, None))
350 350
351 351 converter = self.SETTINGS_TYPES.get(_type) or \
352 352 self.SETTINGS_TYPES['unicode']
353 353 return converter(v)
354 354
355 355 @app_settings_value.setter
356 356 def app_settings_value(self, val):
357 357 """
358 358 Setter that will always make sure we use unicode in app_settings_value
359 359
360 360 :param val:
361 361 """
362 362 val = safe_unicode(val)
363 363 # encode the encrypted value
364 364 if 'encrypted' in self.app_settings_type:
365 365 cipher = EncryptedTextValue()
366 366 val = safe_unicode(cipher.process_bind_param(val, None))
367 367 self._app_settings_value = val
368 368
369 369 @hybrid_property
370 370 def app_settings_type(self):
371 371 return self._app_settings_type
372 372
373 373 @app_settings_type.setter
374 374 def app_settings_type(self, val):
375 375 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 376 raise Exception('type must be one of %s got %s'
377 377 % (self.SETTINGS_TYPES.keys(), val))
378 378 self._app_settings_type = val
379 379
380 380 def __unicode__(self):
381 381 return u"<%s('%s:%s[%s]')>" % (
382 382 self.__class__.__name__,
383 383 self.app_settings_name, self.app_settings_value,
384 384 self.app_settings_type
385 385 )
386 386
387 387
388 388 class RhodeCodeUi(Base, BaseModel):
389 389 __tablename__ = 'rhodecode_ui'
390 390 __table_args__ = (
391 391 UniqueConstraint('ui_key'),
392 392 base_table_args
393 393 )
394 394
395 395 HOOK_REPO_SIZE = 'changegroup.repo_size'
396 396 # HG
397 397 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
398 398 HOOK_PULL = 'outgoing.pull_logger'
399 399 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
400 400 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
401 401 HOOK_PUSH = 'changegroup.push_logger'
402 402 HOOK_PUSH_KEY = 'pushkey.key_push'
403 403
404 404 # TODO: johbo: Unify the way hooks are configured for git and hg,
405 405 # the git part is currently hardcoded.
406 406
407 407 # SVN PATTERNS
408 408 SVN_BRANCH_ID = 'vcs_svn_branch'
409 409 SVN_TAG_ID = 'vcs_svn_tag'
410 410
411 411 ui_id = Column(
412 412 "ui_id", Integer(), nullable=False, unique=True, default=None,
413 413 primary_key=True)
414 414 ui_section = Column(
415 415 "ui_section", String(255), nullable=True, unique=None, default=None)
416 416 ui_key = Column(
417 417 "ui_key", String(255), nullable=True, unique=None, default=None)
418 418 ui_value = Column(
419 419 "ui_value", String(255), nullable=True, unique=None, default=None)
420 420 ui_active = Column(
421 421 "ui_active", Boolean(), nullable=True, unique=None, default=True)
422 422
423 423 def __repr__(self):
424 424 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
425 425 self.ui_key, self.ui_value)
426 426
427 427
428 428 class RepoRhodeCodeSetting(Base, BaseModel):
429 429 __tablename__ = 'repo_rhodecode_settings'
430 430 __table_args__ = (
431 431 UniqueConstraint(
432 432 'app_settings_name', 'repository_id',
433 433 name='uq_repo_rhodecode_setting_name_repo_id'),
434 434 base_table_args
435 435 )
436 436
437 437 repository_id = Column(
438 438 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
439 439 nullable=False)
440 440 app_settings_id = Column(
441 441 "app_settings_id", Integer(), nullable=False, unique=True,
442 442 default=None, primary_key=True)
443 443 app_settings_name = Column(
444 444 "app_settings_name", String(255), nullable=True, unique=None,
445 445 default=None)
446 446 _app_settings_value = Column(
447 447 "app_settings_value", String(4096), nullable=True, unique=None,
448 448 default=None)
449 449 _app_settings_type = Column(
450 450 "app_settings_type", String(255), nullable=True, unique=None,
451 451 default=None)
452 452
453 453 repository = relationship('Repository')
454 454
455 455 def __init__(self, repository_id, key='', val='', type='unicode'):
456 456 self.repository_id = repository_id
457 457 self.app_settings_name = key
458 458 self.app_settings_type = type
459 459 self.app_settings_value = val
460 460
461 461 @validates('_app_settings_value')
462 462 def validate_settings_value(self, key, val):
463 463 assert type(val) == unicode
464 464 return val
465 465
466 466 @hybrid_property
467 467 def app_settings_value(self):
468 468 v = self._app_settings_value
469 469 type_ = self.app_settings_type
470 470 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
471 471 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
472 472 return converter(v)
473 473
474 474 @app_settings_value.setter
475 475 def app_settings_value(self, val):
476 476 """
477 477 Setter that will always make sure we use unicode in app_settings_value
478 478
479 479 :param val:
480 480 """
481 481 self._app_settings_value = safe_unicode(val)
482 482
483 483 @hybrid_property
484 484 def app_settings_type(self):
485 485 return self._app_settings_type
486 486
487 487 @app_settings_type.setter
488 488 def app_settings_type(self, val):
489 489 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
490 490 if val not in SETTINGS_TYPES:
491 491 raise Exception('type must be one of %s got %s'
492 492 % (SETTINGS_TYPES.keys(), val))
493 493 self._app_settings_type = val
494 494
495 495 def __unicode__(self):
496 496 return u"<%s('%s:%s:%s[%s]')>" % (
497 497 self.__class__.__name__, self.repository.repo_name,
498 498 self.app_settings_name, self.app_settings_value,
499 499 self.app_settings_type
500 500 )
501 501
502 502
503 503 class RepoRhodeCodeUi(Base, BaseModel):
504 504 __tablename__ = 'repo_rhodecode_ui'
505 505 __table_args__ = (
506 506 UniqueConstraint(
507 507 'repository_id', 'ui_section', 'ui_key',
508 508 name='uq_repo_rhodecode_ui_repository_id_section_key'),
509 509 base_table_args
510 510 )
511 511
512 512 repository_id = Column(
513 513 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
514 514 nullable=False)
515 515 ui_id = Column(
516 516 "ui_id", Integer(), nullable=False, unique=True, default=None,
517 517 primary_key=True)
518 518 ui_section = Column(
519 519 "ui_section", String(255), nullable=True, unique=None, default=None)
520 520 ui_key = Column(
521 521 "ui_key", String(255), nullable=True, unique=None, default=None)
522 522 ui_value = Column(
523 523 "ui_value", String(255), nullable=True, unique=None, default=None)
524 524 ui_active = Column(
525 525 "ui_active", Boolean(), nullable=True, unique=None, default=True)
526 526
527 527 repository = relationship('Repository')
528 528
529 529 def __repr__(self):
530 530 return '<%s[%s:%s]%s=>%s]>' % (
531 531 self.__class__.__name__, self.repository.repo_name,
532 532 self.ui_section, self.ui_key, self.ui_value)
533 533
534 534
535 535 class User(Base, BaseModel):
536 536 __tablename__ = 'users'
537 537 __table_args__ = (
538 538 UniqueConstraint('username'), UniqueConstraint('email'),
539 539 Index('u_username_idx', 'username'),
540 540 Index('u_email_idx', 'email'),
541 541 base_table_args
542 542 )
543 543
544 544 DEFAULT_USER = 'default'
545 545 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
546 546 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
547 547
548 548 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
549 549 username = Column("username", String(255), nullable=True, unique=None, default=None)
550 550 password = Column("password", String(255), nullable=True, unique=None, default=None)
551 551 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
552 552 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
553 553 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
554 554 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
555 555 _email = Column("email", String(255), nullable=True, unique=None, default=None)
556 556 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
557 557 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
558 558
559 559 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
560 560 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
561 561 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
562 562 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
563 563 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
564 564 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
565 565
566 566 user_log = relationship('UserLog')
567 567 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
568 568
569 569 repositories = relationship('Repository')
570 570 repository_groups = relationship('RepoGroup')
571 571 user_groups = relationship('UserGroup')
572 572
573 573 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
574 574 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
575 575
576 576 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
577 577 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
578 578 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
579 579
580 580 group_member = relationship('UserGroupMember', cascade='all')
581 581
582 582 notifications = relationship('UserNotification', cascade='all')
583 583 # notifications assigned to this user
584 584 user_created_notifications = relationship('Notification', cascade='all')
585 585 # comments created by this user
586 586 user_comments = relationship('ChangesetComment', cascade='all')
587 587 # user profile extra info
588 588 user_emails = relationship('UserEmailMap', cascade='all')
589 589 user_ip_map = relationship('UserIpMap', cascade='all')
590 590 user_auth_tokens = relationship('UserApiKeys', cascade='all')
591 591 user_ssh_keys = relationship('UserSshKeys', cascade='all')
592 592
593 593 # gists
594 594 user_gists = relationship('Gist', cascade='all')
595 595 # user pull requests
596 596 user_pull_requests = relationship('PullRequest', cascade='all')
597 597 # external identities
598 598 extenal_identities = relationship(
599 599 'ExternalIdentity',
600 600 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
601 601 cascade='all')
602 602 # review rules
603 603 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
604 604
605 605 def __unicode__(self):
606 606 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
607 607 self.user_id, self.username)
608 608
609 609 @hybrid_property
610 610 def email(self):
611 611 return self._email
612 612
613 613 @email.setter
614 614 def email(self, val):
615 615 self._email = val.lower() if val else None
616 616
617 617 @hybrid_property
618 618 def first_name(self):
619 619 from rhodecode.lib import helpers as h
620 620 if self.name:
621 621 return h.escape(self.name)
622 622 return self.name
623 623
624 624 @hybrid_property
625 625 def last_name(self):
626 626 from rhodecode.lib import helpers as h
627 627 if self.lastname:
628 628 return h.escape(self.lastname)
629 629 return self.lastname
630 630
631 631 @hybrid_property
632 632 def api_key(self):
633 633 """
634 634 Fetch an auth-token with the role ALL connected to this user, if one exists
635 635 """
636 636 user_auth_token = UserApiKeys.query()\
637 637 .filter(UserApiKeys.user_id == self.user_id)\
638 638 .filter(or_(UserApiKeys.expires == -1,
639 639 UserApiKeys.expires >= time.time()))\
640 640 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
641 641 if user_auth_token:
642 642 user_auth_token = user_auth_token.api_key
643 643
644 644 return user_auth_token
645 645
646 646 @api_key.setter
647 647 def api_key(self, val):
648 648 # don't allow setting the API key; this is deprecated for now
649 649 self._api_key = None
650 650
651 651 @property
652 652 def reviewer_pull_requests(self):
653 653 return PullRequestReviewers.query() \
654 654 .options(joinedload(PullRequestReviewers.pull_request)) \
655 655 .filter(PullRequestReviewers.user_id == self.user_id) \
656 656 .all()
657 657
658 658 @property
659 659 def firstname(self):
660 660 # alias for future
661 661 return self.name
662 662
663 663 @property
664 664 def emails(self):
665 665 other = UserEmailMap.query()\
666 666 .filter(UserEmailMap.user == self) \
667 667 .order_by(UserEmailMap.email_id.asc()) \
668 668 .all()
669 669 return [self.email] + [x.email for x in other]
670 670
671 671 @property
672 672 def auth_tokens(self):
673 673 auth_tokens = self.get_auth_tokens()
674 674 return [x.api_key for x in auth_tokens]
675 675
676 676 def get_auth_tokens(self):
677 677 return UserApiKeys.query()\
678 678 .filter(UserApiKeys.user == self)\
679 679 .order_by(UserApiKeys.user_api_key_id.asc())\
680 680 .all()
681 681
682 682 @LazyProperty
683 683 def feed_token(self):
684 684 return self.get_feed_token()
685 685
686 686 def get_feed_token(self, cache=True):
687 687 feed_tokens = UserApiKeys.query()\
688 688 .filter(UserApiKeys.user == self)\
689 689 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
690 690 if cache:
691 691 feed_tokens = feed_tokens.options(
692 692 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
693 693
694 694 feed_tokens = feed_tokens.all()
695 695 if feed_tokens:
696 696 return feed_tokens[0].api_key
697 697 return 'NO_FEED_TOKEN_AVAILABLE'
698 698
699 699 @classmethod
700 700 def get(cls, user_id, cache=False):
701 701 if not user_id:
702 702 return
703 703
704 704 user = cls.query()
705 705 if cache:
706 706 user = user.options(
707 707 FromCache("sql_cache_short", "get_users_%s" % user_id))
708 708 return user.get(user_id)
709 709
710 710 @classmethod
711 711 def extra_valid_auth_tokens(cls, user, role=None):
712 712 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
713 713 .filter(or_(UserApiKeys.expires == -1,
714 714 UserApiKeys.expires >= time.time()))
715 715 if role:
716 716 tokens = tokens.filter(or_(UserApiKeys.role == role,
717 717 UserApiKeys.role == UserApiKeys.ROLE_ALL))
718 718 return tokens.all()
719 719
720 720 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
721 721 from rhodecode.lib import auth
722 722
723 723 log.debug('Trying to authenticate user: %s via auth-token, '
724 724 'and roles: %s', self, roles)
725 725
726 726 if not auth_token:
727 727 return False
728 728
729 729 crypto_backend = auth.crypto_backend()
730 730
731 731 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
732 732 tokens_q = UserApiKeys.query()\
733 733 .filter(UserApiKeys.user_id == self.user_id)\
734 734 .filter(or_(UserApiKeys.expires == -1,
735 735 UserApiKeys.expires >= time.time()))
736 736
737 737 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
738 738
739 739 plain_tokens = []
740 740 hash_tokens = []
741 741
742 742 user_tokens = tokens_q.all()
743 743 log.debug('Found %s user tokens to check for authentication', len(user_tokens))
744 744 for token in user_tokens:
745 745 log.debug('AUTH_TOKEN: checking if user token with id `%s` matches',
746 746 token.user_api_key_id)
747 747 # verify scope first, since it's way faster than hash calculation of
748 748 # encrypted tokens
749 749 if token.repo_id:
750 750 # token has a scope, we need to verify it
751 751 if scope_repo_id != token.repo_id:
752 752 log.debug(
753 753 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
754 754 'and calling scope is:%s, skipping further checks',
755 755 token.repo, scope_repo_id)
756 756 # token has a scope, and it doesn't match, skip token
757 757 continue
758 758
759 759 if token.api_key.startswith(crypto_backend.ENC_PREF):
760 760 hash_tokens.append(token.api_key)
761 761 else:
762 762 plain_tokens.append(token.api_key)
763 763
764 764 is_plain_match = auth_token in plain_tokens
765 765 if is_plain_match:
766 766 return True
767 767
768 768 for hashed in hash_tokens:
769 769 # NOTE(marcink): this is expensive to calculate, but most secure
770 770 match = crypto_backend.hash_check(auth_token, hashed)
771 771 if match:
772 772 return True
773 773
774 774 return False
775 775
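A sketch of calling the token check above; the username and token string are placeholders, and ROLE_ALL is always accepted in addition to the roles passed in:

    user = User.get_by_username('some-user')    # placeholder username
    ok = user.authenticate_by_token(
        'xxxx-placeholder-token-xxxx',          # placeholder token string
        roles=[UserApiKeys.ROLE_VCS],           # ROLE_ALL is added implicitly
        scope_repo_id=None)                     # set a repo_id for repo-scoped calls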
776 776 @property
777 777 def ip_addresses(self):
778 778 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
779 779 return [x.ip_addr for x in ret]
780 780
781 781 @property
782 782 def username_and_name(self):
783 783 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
784 784
785 785 @property
786 786 def username_or_name_or_email(self):
787 787 full_name = self.full_name if self.full_name != ' ' else None
788 788 return self.username or full_name or self.email
789 789
790 790 @property
791 791 def full_name(self):
792 792 return '%s %s' % (self.first_name, self.last_name)
793 793
794 794 @property
795 795 def full_name_or_username(self):
796 796 return ('%s %s' % (self.first_name, self.last_name)
797 797 if (self.first_name and self.last_name) else self.username)
798 798
799 799 @property
800 800 def full_contact(self):
801 801 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
802 802
803 803 @property
804 804 def short_contact(self):
805 805 return '%s %s' % (self.first_name, self.last_name)
806 806
807 807 @property
808 808 def is_admin(self):
809 809 return self.admin
810 810
811 811 def AuthUser(self, **kwargs):
812 812 """
813 813 Returns instance of AuthUser for this user
814 814 """
815 815 from rhodecode.lib.auth import AuthUser
816 816 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
817 817
818 818 @hybrid_property
819 819 def user_data(self):
820 820 if not self._user_data:
821 821 return {}
822 822
823 823 try:
824 824 return json.loads(self._user_data)
825 825 except TypeError:
826 826 return {}
827 827
828 828 @user_data.setter
829 829 def user_data(self, val):
830 830 if not isinstance(val, dict):
831 831 raise Exception('user_data must be dict, got %s' % type(val))
832 832 try:
833 833 self._user_data = json.dumps(val)
834 834 except Exception:
835 835 log.error(traceback.format_exc())
836 836
837 837 @classmethod
838 838 def get_by_username(cls, username, case_insensitive=False,
839 839 cache=False, identity_cache=False):
840 840 session = Session()
841 841
842 842 if case_insensitive:
843 843 q = cls.query().filter(
844 844 func.lower(cls.username) == func.lower(username))
845 845 else:
846 846 q = cls.query().filter(cls.username == username)
847 847
848 848 if cache:
849 849 if identity_cache:
850 850 val = cls.identity_cache(session, 'username', username)
851 851 if val:
852 852 return val
853 853 else:
854 854 cache_key = "get_user_by_name_%s" % _hash_key(username)
855 855 q = q.options(
856 856 FromCache("sql_cache_short", cache_key))
857 857
858 858 return q.scalar()
859 859
860 860 @classmethod
861 861 def get_by_auth_token(cls, auth_token, cache=False):
862 862 q = UserApiKeys.query()\
863 863 .filter(UserApiKeys.api_key == auth_token)\
864 864 .filter(or_(UserApiKeys.expires == -1,
865 865 UserApiKeys.expires >= time.time()))
866 866 if cache:
867 867 q = q.options(
868 868 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
869 869
870 870 match = q.first()
871 871 if match:
872 872 return match.user
873 873
874 874 @classmethod
875 875 def get_by_email(cls, email, case_insensitive=False, cache=False):
876 876
877 877 if case_insensitive:
878 878 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
879 879
880 880 else:
881 881 q = cls.query().filter(cls.email == email)
882 882
883 883 email_key = _hash_key(email)
884 884 if cache:
885 885 q = q.options(
886 886 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
887 887
888 888 ret = q.scalar()
889 889 if ret is None:
890 890 q = UserEmailMap.query()
891 891 # try fetching in alternate email map
892 892 if case_insensitive:
893 893 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
894 894 else:
895 895 q = q.filter(UserEmailMap.email == email)
896 896 q = q.options(joinedload(UserEmailMap.user))
897 897 if cache:
898 898 q = q.options(
899 899 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
900 900 ret = getattr(q.scalar(), 'user', None)
901 901
902 902 return ret
903 903
904 904 @classmethod
905 905 def get_from_cs_author(cls, author):
906 906 """
907 907 Tries to get User objects out of commit author string
908 908
909 909 :param author:
910 910 """
911 911 from rhodecode.lib.helpers import email, author_name
912 912 # Valid email in the attribute passed, see if they're in the system
913 913 _email = email(author)
914 914 if _email:
915 915 user = cls.get_by_email(_email, case_insensitive=True)
916 916 if user:
917 917 return user
918 918 # Maybe we can match by username?
919 919 _author = author_name(author)
920 920 user = cls.get_by_username(_author, case_insensitive=True)
921 921 if user:
922 922 return user
923 923
924 924 def update_userdata(self, **kwargs):
925 925 usr = self
926 926 old = usr.user_data
927 927 old.update(**kwargs)
928 928 usr.user_data = old
929 929 Session().add(usr)
930 930 log.debug('updated userdata with %s', kwargs)
931 931
932 932 def update_lastlogin(self):
933 933 """Update user lastlogin"""
934 934 self.last_login = datetime.datetime.now()
935 935 Session().add(self)
936 936 log.debug('updated user %s lastlogin', self.username)
937 937
938 938 def update_password(self, new_password):
939 939 from rhodecode.lib.auth import get_crypt_password
940 940
941 941 self.password = get_crypt_password(new_password)
942 942 Session().add(self)
943 943
944 944 @classmethod
945 945 def get_first_super_admin(cls):
946 946 user = User.query()\
947 947 .filter(User.admin == true()) \
948 948 .order_by(User.user_id.asc()) \
949 949 .first()
950 950
951 951 if user is None:
952 952 raise Exception('FATAL: Missing administrative account!')
953 953 return user
954 954
955 955 @classmethod
956 956 def get_all_super_admins(cls):
957 957 """
958 958 Returns all admin accounts sorted by username
959 959 """
960 960 return User.query().filter(User.admin == true())\
961 961 .order_by(User.username.asc()).all()
962 962
963 963 @classmethod
964 964 def get_default_user(cls, cache=False, refresh=False):
965 965 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
966 966 if user is None:
967 967 raise Exception('FATAL: Missing default account!')
968 968 if refresh:
969 969 # The default user might be based on outdated state which
970 970 # has been loaded from the cache.
971 971 # A call to refresh() ensures that the
972 972 # latest state from the database is used.
973 973 Session().refresh(user)
974 974 return user
975 975
976 976 def _get_default_perms(self, user, suffix=''):
977 977 from rhodecode.model.permission import PermissionModel
978 978 return PermissionModel().get_default_perms(user.user_perms, suffix)
979 979
980 980 def get_default_perms(self, suffix=''):
981 981 return self._get_default_perms(self, suffix)
982 982
983 983 def get_api_data(self, include_secrets=False, details='full'):
984 984 """
985 985 Common function for generating user related data for API
986 986
987 987 :param include_secrets: By default secrets in the API data will be replaced
988 988 by a placeholder value to prevent exposing this data by accident. In case
989 989 this data shall be exposed, set this flag to ``True``.
990 990
991 991 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
992 992 the available user information, including user_id, name and emails.
993 993 """
994 994 user = self
995 995 user_data = self.user_data
996 996 data = {
997 997 'user_id': user.user_id,
998 998 'username': user.username,
999 999 'firstname': user.name,
1000 1000 'lastname': user.lastname,
1001 1001 'email': user.email,
1002 1002 'emails': user.emails,
1003 1003 }
1004 1004 if details == 'basic':
1005 1005 return data
1006 1006
1007 1007 auth_token_length = 40
1008 1008 auth_token_replacement = '*' * auth_token_length
1009 1009
1010 1010 extras = {
1011 1011 'auth_tokens': [auth_token_replacement],
1012 1012 'active': user.active,
1013 1013 'admin': user.admin,
1014 1014 'extern_type': user.extern_type,
1015 1015 'extern_name': user.extern_name,
1016 1016 'last_login': user.last_login,
1017 1017 'last_activity': user.last_activity,
1018 1018 'ip_addresses': user.ip_addresses,
1019 1019 'language': user_data.get('language')
1020 1020 }
1021 1021 data.update(extras)
1022 1022
1023 1023 if include_secrets:
1024 1024 data['auth_tokens'] = user.auth_tokens
1025 1025 return data
1026 1026
1027 1027 def __json__(self):
1028 1028 data = {
1029 1029 'full_name': self.full_name,
1030 1030 'full_name_or_username': self.full_name_or_username,
1031 1031 'short_contact': self.short_contact,
1032 1032 'full_contact': self.full_contact,
1033 1033 }
1034 1034 data.update(self.get_api_data())
1035 1035 return data
1036 1036
1037 1037
1038 1038 class UserApiKeys(Base, BaseModel):
1039 1039 __tablename__ = 'user_api_keys'
1040 1040 __table_args__ = (
1041 1041 Index('uak_api_key_idx', 'api_key', unique=True),
1042 1042 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1043 1043 base_table_args
1044 1044 )
1045 1045 __mapper_args__ = {}
1046 1046
1047 1047 # ApiKey role
1048 1048 ROLE_ALL = 'token_role_all'
1049 1049 ROLE_HTTP = 'token_role_http'
1050 1050 ROLE_VCS = 'token_role_vcs'
1051 1051 ROLE_API = 'token_role_api'
1052 1052 ROLE_FEED = 'token_role_feed'
1053 1053 ROLE_PASSWORD_RESET = 'token_password_reset'
1054 1054
1055 1055 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1056 1056
1057 1057 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1058 1058 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1059 1059 api_key = Column("api_key", String(255), nullable=False, unique=True)
1060 1060 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1061 1061 expires = Column('expires', Float(53), nullable=False)
1062 1062 role = Column('role', String(255), nullable=True)
1063 1063 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1064 1064
1065 1065 # scope columns
1066 1066 repo_id = Column(
1067 1067 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1068 1068 nullable=True, unique=None, default=None)
1069 1069 repo = relationship('Repository', lazy='joined')
1070 1070
1071 1071 repo_group_id = Column(
1072 1072 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1073 1073 nullable=True, unique=None, default=None)
1074 1074 repo_group = relationship('RepoGroup', lazy='joined')
1075 1075
1076 1076 user = relationship('User', lazy='joined')
1077 1077
1078 1078 def __unicode__(self):
1079 1079 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1080 1080
1081 1081 def __json__(self):
1082 1082 data = {
1083 1083 'auth_token': self.api_key,
1084 1084 'role': self.role,
1085 1085 'scope': self.scope_humanized,
1086 1086 'expired': self.expired
1087 1087 }
1088 1088 return data
1089 1089
1090 1090 def get_api_data(self, include_secrets=False):
1091 1091 data = self.__json__()
1092 1092 if include_secrets:
1093 1093 return data
1094 1094 else:
1095 1095 data['auth_token'] = self.token_obfuscated
1096 1096 return data
1097 1097
1098 1098 @hybrid_property
1099 1099 def description_safe(self):
1100 1100 from rhodecode.lib import helpers as h
1101 1101 return h.escape(self.description)
1102 1102
1103 1103 @property
1104 1104 def expired(self):
1105 1105 if self.expires == -1:
1106 1106 return False
1107 1107 return time.time() > self.expires
1108 1108
1109 1109 @classmethod
1110 1110 def _get_role_name(cls, role):
1111 1111 return {
1112 1112 cls.ROLE_ALL: _('all'),
1113 1113 cls.ROLE_HTTP: _('http/web interface'),
1114 1114 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1115 1115 cls.ROLE_API: _('api calls'),
1116 1116 cls.ROLE_FEED: _('feed access'),
1117 1117 }.get(role, role)
1118 1118
1119 1119 @property
1120 1120 def role_humanized(self):
1121 1121 return self._get_role_name(self.role)
1122 1122
1123 1123 def _get_scope(self):
1124 1124 if self.repo:
1125 1125 return repr(self.repo)
1126 1126 if self.repo_group:
1127 1127 return repr(self.repo_group) + ' (recursive)'
1128 1128 return 'global'
1129 1129
1130 1130 @property
1131 1131 def scope_humanized(self):
1132 1132 return self._get_scope()
1133 1133
1134 1134 @property
1135 1135 def token_obfuscated(self):
1136 1136 if self.api_key:
1137 1137 return self.api_key[:4] + "****"
1138 1138
1139 1139
1140 1140 class UserEmailMap(Base, BaseModel):
1141 1141 __tablename__ = 'user_email_map'
1142 1142 __table_args__ = (
1143 1143 Index('uem_email_idx', 'email'),
1144 1144 UniqueConstraint('email'),
1145 1145 base_table_args
1146 1146 )
1147 1147 __mapper_args__ = {}
1148 1148
1149 1149 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1150 1150 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1151 1151 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1152 1152 user = relationship('User', lazy='joined')
1153 1153
1154 1154 @validates('_email')
1155 1155 def validate_email(self, key, email):
1156 1156 # check that this email is not the main one
1157 1157 main_email = Session().query(User).filter(User.email == email).scalar()
1158 1158 if main_email is not None:
1159 1159 raise AttributeError('email %s is present in user table' % email)
1160 1160 return email
1161 1161
1162 1162 @hybrid_property
1163 1163 def email(self):
1164 1164 return self._email
1165 1165
1166 1166 @email.setter
1167 1167 def email(self, val):
1168 1168 self._email = val.lower() if val else None
1169 1169
1170 1170
1171 1171 class UserIpMap(Base, BaseModel):
1172 1172 __tablename__ = 'user_ip_map'
1173 1173 __table_args__ = (
1174 1174 UniqueConstraint('user_id', 'ip_addr'),
1175 1175 base_table_args
1176 1176 )
1177 1177 __mapper_args__ = {}
1178 1178
1179 1179 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1180 1180 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1181 1181 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1182 1182 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1183 1183 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1184 1184 user = relationship('User', lazy='joined')
1185 1185
1186 1186 @hybrid_property
1187 1187 def description_safe(self):
1188 1188 from rhodecode.lib import helpers as h
1189 1189 return h.escape(self.description)
1190 1190
1191 1191 @classmethod
1192 1192 def _get_ip_range(cls, ip_addr):
1193 1193 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1194 1194 return [str(net.network_address), str(net.broadcast_address)]
1195 1195
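# Illustrative sketch (not part of the diff): _get_ip_range() expands a CIDR into
# its first and last address via the stdlib `ipaddress` module, e.g.
#   >>> UserIpMap._get_ip_range('192.168.1.0/24')
#   ['192.168.1.0', '192.168.1.255']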
1196 1196 def __json__(self):
1197 1197 return {
1198 1198 'ip_addr': self.ip_addr,
1199 1199 'ip_range': self._get_ip_range(self.ip_addr),
1200 1200 }
1201 1201
1202 1202 def __unicode__(self):
1203 1203 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1204 1204 self.user_id, self.ip_addr)
1205 1205
1206 1206
1207 1207 class UserSshKeys(Base, BaseModel):
1208 1208 __tablename__ = 'user_ssh_keys'
1209 1209 __table_args__ = (
1210 1210 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1211 1211
1212 1212 UniqueConstraint('ssh_key_fingerprint'),
1213 1213
1214 1214 base_table_args
1215 1215 )
1216 1216 __mapper_args__ = {}
1217 1217
1218 1218 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1219 1219 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1220 1220 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1221 1221
1222 1222 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1223 1223
1224 1224 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1225 1225 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1226 1226 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1227 1227
1228 1228 user = relationship('User', lazy='joined')
1229 1229
1230 1230 def __json__(self):
1231 1231 data = {
1232 1232 'ssh_fingerprint': self.ssh_key_fingerprint,
1233 1233 'description': self.description,
1234 1234 'created_on': self.created_on
1235 1235 }
1236 1236 return data
1237 1237
1238 1238 def get_api_data(self):
1239 1239 data = self.__json__()
1240 1240 return data
1241 1241
1242 1242
1243 1243 class UserLog(Base, BaseModel):
1244 1244 __tablename__ = 'user_logs'
1245 1245 __table_args__ = (
1246 1246 base_table_args,
1247 1247 )
1248 1248
1249 1249 VERSION_1 = 'v1'
1250 1250 VERSION_2 = 'v2'
1251 1251 VERSIONS = [VERSION_1, VERSION_2]
1252 1252
1253 1253 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1254 1254 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 1255 username = Column("username", String(255), nullable=True, unique=None, default=None)
1256 1256 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1257 1257 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1258 1258 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1259 1259 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1260 1260 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1261 1261
1262 1262 version = Column("version", String(255), nullable=True, default=VERSION_1)
1263 1263 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1264 1264 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1265 1265
1266 1266 def __unicode__(self):
1267 1267 return u"<%s('id:%s:%s')>" % (
1268 1268 self.__class__.__name__, self.repository_name, self.action)
1269 1269
1270 1270 def __json__(self):
1271 1271 return {
1272 1272 'user_id': self.user_id,
1273 1273 'username': self.username,
1274 1274 'repository_id': self.repository_id,
1275 1275 'repository_name': self.repository_name,
1276 1276 'user_ip': self.user_ip,
1277 1277 'action_date': self.action_date,
1278 1278 'action': self.action,
1279 1279 }
1280 1280
1281 1281 @hybrid_property
1282 1282 def entry_id(self):
1283 1283 return self.user_log_id
1284 1284
1285 1285 @property
1286 1286 def action_as_day(self):
1287 1287 return datetime.date(*self.action_date.timetuple()[:3])
1288 1288
1289 1289 user = relationship('User')
1290 1290 repository = relationship('Repository', cascade='')
1291 1291
1292 1292
1293 1293 class UserGroup(Base, BaseModel):
1294 1294 __tablename__ = 'users_groups'
1295 1295 __table_args__ = (
1296 1296 base_table_args,
1297 1297 )
1298 1298
1299 1299 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1300 1300 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1301 1301 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1302 1302 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1303 1303 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1304 1304 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1305 1305 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1306 1306 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1307 1307
1308 1308 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1309 1309 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1310 1310 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1311 1311 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1312 1312 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1313 1313 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1314 1314
1315 1315 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1316 1316 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1317 1317
1318 1318 @classmethod
1319 1319 def _load_group_data(cls, column):
1320 1320 if not column:
1321 1321 return {}
1322 1322
1323 1323 try:
1324 1324 return json.loads(column) or {}
1325 1325 except TypeError:
1326 1326 return {}
1327 1327
1328 1328 @hybrid_property
1329 1329 def description_safe(self):
1330 1330 from rhodecode.lib import helpers as h
1331 1331 return h.escape(self.user_group_description)
1332 1332
1333 1333 @hybrid_property
1334 1334 def group_data(self):
1335 1335 return self._load_group_data(self._group_data)
1336 1336
1337 1337 @group_data.expression
1338 1338 def group_data(self, **kwargs):
1339 1339 return self._group_data
1340 1340
1341 1341 @group_data.setter
1342 1342 def group_data(self, val):
1343 1343 try:
1344 1344 self._group_data = json.dumps(val)
1345 1345 except Exception:
1346 1346 log.error(traceback.format_exc())
1347 1347
1348 1348 @classmethod
1349 1349 def _load_sync(cls, group_data):
1350 1350 if group_data:
1351 1351 return group_data.get('extern_type')
1352 1352
1353 1353 @property
1354 1354 def sync(self):
1355 1355 return self._load_sync(self.group_data)
1356 1356
1357 1357 def __unicode__(self):
1358 1358 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1359 1359 self.users_group_id,
1360 1360 self.users_group_name)
1361 1361
1362 1362 @classmethod
1363 1363 def get_by_group_name(cls, group_name, cache=False,
1364 1364 case_insensitive=False):
1365 1365 if case_insensitive:
1366 1366 q = cls.query().filter(func.lower(cls.users_group_name) ==
1367 1367 func.lower(group_name))
1368 1368
1369 1369 else:
1370 1370 q = cls.query().filter(cls.users_group_name == group_name)
1371 1371 if cache:
1372 1372 q = q.options(
1373 1373 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1374 1374 return q.scalar()
1375 1375
1376 1376 @classmethod
1377 1377 def get(cls, user_group_id, cache=False):
1378 1378 if not user_group_id:
1379 1379 return
1380 1380
1381 1381 user_group = cls.query()
1382 1382 if cache:
1383 1383 user_group = user_group.options(
1384 1384 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1385 1385 return user_group.get(user_group_id)
1386 1386
1387 1387 def permissions(self, with_admins=True, with_owner=True):
1388 1388 """
1389 1389 Permissions for user groups
1390 1390 """
1391 1391 _admin_perm = 'usergroup.admin'
1392 1392
1393 1393 owner_row = []
1394 1394 if with_owner:
1395 1395 usr = AttributeDict(self.user.get_dict())
1396 1396 usr.owner_row = True
1397 1397 usr.permission = _admin_perm
1398 1398 owner_row.append(usr)
1399 1399
1400 1400 super_admin_ids = []
1401 1401 super_admin_rows = []
1402 1402 if with_admins:
1403 1403 for usr in User.get_all_super_admins():
1404 1404 super_admin_ids.append(usr.user_id)
1405 1405 # if this admin is also owner, don't double the record
1406 1406 if usr.user_id == owner_row[0].user_id:
1407 1407 owner_row[0].admin_row = True
1408 1408 else:
1409 1409 usr = AttributeDict(usr.get_dict())
1410 1410 usr.admin_row = True
1411 1411 usr.permission = _admin_perm
1412 1412 super_admin_rows.append(usr)
1413 1413
1414 1414 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1415 1415 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1416 1416 joinedload(UserUserGroupToPerm.user),
1417 1417 joinedload(UserUserGroupToPerm.permission),)
1418 1418
1419 1419 # get owners and admins and permissions. We do a trick of re-writing
1420 1420 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1421 1421 # holds a global reference, so changing one object propagates to all
1422 1422 # others. This means if an admin is also an owner, an admin_row change
1423 1423 # would propagate to both objects
1424 1424 perm_rows = []
1425 1425 for _usr in q.all():
1426 1426 usr = AttributeDict(_usr.user.get_dict())
1427 1427 # if this user is also owner/admin, mark as duplicate record
1428 1428 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1429 1429 usr.duplicate_perm = True
1430 1430 usr.permission = _usr.permission.permission_name
1431 1431 perm_rows.append(usr)
1432 1432
1433 1433 # filter the perm rows by 'default' first and then sort them by
1434 1434 # admin,write,read,none permissions sorted again alphabetically in
1435 1435 # each group
1436 1436 perm_rows = sorted(perm_rows, key=display_user_sort)
1437 1437
1438 1438 return super_admin_rows + owner_row + perm_rows
1439 1439
1440 1440 def permission_user_groups(self):
1441 1441 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1442 1442 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1443 1443 joinedload(UserGroupUserGroupToPerm.target_user_group),
1444 1444 joinedload(UserGroupUserGroupToPerm.permission),)
1445 1445
1446 1446 perm_rows = []
1447 1447 for _user_group in q.all():
1448 1448 usr = AttributeDict(_user_group.user_group.get_dict())
1449 1449 usr.permission = _user_group.permission.permission_name
1450 1450 perm_rows.append(usr)
1451 1451
1452 1452 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1453 1453 return perm_rows
1454 1454
1455 1455 def _get_default_perms(self, user_group, suffix=''):
1456 1456 from rhodecode.model.permission import PermissionModel
1457 1457 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1458 1458
1459 1459 def get_default_perms(self, suffix=''):
1460 1460 return self._get_default_perms(self, suffix)
1461 1461
1462 1462 def get_api_data(self, with_group_members=True, include_secrets=False):
1463 1463 """
1464 1464 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1465 1465 basically forwarded.
1466 1466
1467 1467 """
1468 1468 user_group = self
1469 1469 data = {
1470 1470 'users_group_id': user_group.users_group_id,
1471 1471 'group_name': user_group.users_group_name,
1472 1472 'group_description': user_group.user_group_description,
1473 1473 'active': user_group.users_group_active,
1474 1474 'owner': user_group.user.username,
1475 1475 'sync': user_group.sync,
1476 1476 'owner_email': user_group.user.email,
1477 1477 }
1478 1478
1479 1479 if with_group_members:
1480 1480 users = []
1481 1481 for user in user_group.members:
1482 1482 user = user.user
1483 1483 users.append(user.get_api_data(include_secrets=include_secrets))
1484 1484 data['users'] = users
1485 1485
1486 1486 return data
1487 1487
1488 1488
1489 1489 class UserGroupMember(Base, BaseModel):
1490 1490 __tablename__ = 'users_groups_members'
1491 1491 __table_args__ = (
1492 1492 base_table_args,
1493 1493 )
1494 1494
1495 1495 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1496 1496 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1497 1497 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1498 1498
1499 1499 user = relationship('User', lazy='joined')
1500 1500 users_group = relationship('UserGroup')
1501 1501
1502 1502 def __init__(self, gr_id='', u_id=''):
1503 1503 self.users_group_id = gr_id
1504 1504 self.user_id = u_id
1505 1505
1506 1506
1507 1507 class RepositoryField(Base, BaseModel):
1508 1508 __tablename__ = 'repositories_fields'
1509 1509 __table_args__ = (
1510 1510 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1511 1511 base_table_args,
1512 1512 )
1513 1513
1514 1514 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1515 1515
1516 1516 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1517 1517 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1518 1518 field_key = Column("field_key", String(250))
1519 1519 field_label = Column("field_label", String(1024), nullable=False)
1520 1520 field_value = Column("field_value", String(10000), nullable=False)
1521 1521 field_desc = Column("field_desc", String(1024), nullable=False)
1522 1522 field_type = Column("field_type", String(255), nullable=False, unique=None)
1523 1523 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1524 1524
1525 1525 repository = relationship('Repository')
1526 1526
1527 1527 @property
1528 1528 def field_key_prefixed(self):
1529 1529 return 'ex_%s' % self.field_key
1530 1530
1531 1531 @classmethod
1532 1532 def un_prefix_key(cls, key):
1533 1533 if key.startswith(cls.PREFIX):
1534 1534 return key[len(cls.PREFIX):]
1535 1535 return key
1536 1536
1537 1537 @classmethod
1538 1538 def get_by_key_name(cls, key, repo):
1539 1539 row = cls.query()\
1540 1540 .filter(cls.repository == repo)\
1541 1541 .filter(cls.field_key == key).scalar()
1542 1542 return row
1543 1543
1544 1544
1545 1545 class Repository(Base, BaseModel):
1546 1546 __tablename__ = 'repositories'
1547 1547 __table_args__ = (
1548 1548 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1549 1549 base_table_args,
1550 1550 )
1551 1551 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1552 1552 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1553 1553 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1554 1554
1555 1555 STATE_CREATED = 'repo_state_created'
1556 1556 STATE_PENDING = 'repo_state_pending'
1557 1557 STATE_ERROR = 'repo_state_error'
1558 1558
1559 1559 LOCK_AUTOMATIC = 'lock_auto'
1560 1560 LOCK_API = 'lock_api'
1561 1561 LOCK_WEB = 'lock_web'
1562 1562 LOCK_PULL = 'lock_pull'
1563 1563
1564 1564 NAME_SEP = URL_SEP
1565 1565
1566 1566 repo_id = Column(
1567 1567 "repo_id", Integer(), nullable=False, unique=True, default=None,
1568 1568 primary_key=True)
1569 1569 _repo_name = Column(
1570 1570 "repo_name", Text(), nullable=False, default=None)
1571 1571 _repo_name_hash = Column(
1572 1572 "repo_name_hash", String(255), nullable=False, unique=True)
1573 1573 repo_state = Column("repo_state", String(255), nullable=True)
1574 1574
1575 1575 clone_uri = Column(
1576 1576 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1577 1577 default=None)
1578 1578 push_uri = Column(
1579 1579 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1580 1580 default=None)
1581 1581 repo_type = Column(
1582 1582 "repo_type", String(255), nullable=False, unique=False, default=None)
1583 1583 user_id = Column(
1584 1584 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1585 1585 unique=False, default=None)
1586 1586 private = Column(
1587 1587 "private", Boolean(), nullable=True, unique=None, default=None)
1588 archived = Column(
1589 "archived", Boolean(), nullable=True, unique=None, default=None)
1588 1590 enable_statistics = Column(
1589 1591 "statistics", Boolean(), nullable=True, unique=None, default=True)
1590 1592 enable_downloads = Column(
1591 1593 "downloads", Boolean(), nullable=True, unique=None, default=True)
1592 1594 description = Column(
1593 1595 "description", String(10000), nullable=True, unique=None, default=None)
1594 1596 created_on = Column(
1595 1597 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1596 1598 default=datetime.datetime.now)
1597 1599 updated_on = Column(
1598 1600 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1599 1601 default=datetime.datetime.now)
1600 1602 _landing_revision = Column(
1601 1603 "landing_revision", String(255), nullable=False, unique=False,
1602 1604 default=None)
1603 1605 enable_locking = Column(
1604 1606 "enable_locking", Boolean(), nullable=False, unique=None,
1605 1607 default=False)
1606 1608 _locked = Column(
1607 1609 "locked", String(255), nullable=True, unique=False, default=None)
1608 1610 _changeset_cache = Column(
1609 1611 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1610 1612
1611 1613 fork_id = Column(
1612 1614 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1613 1615 nullable=True, unique=False, default=None)
1614 1616 group_id = Column(
1615 1617 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1616 1618 unique=False, default=None)
1617 1619
1618 1620 user = relationship('User', lazy='joined')
1619 1621 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1620 1622 group = relationship('RepoGroup', lazy='joined')
1621 1623 repo_to_perm = relationship(
1622 1624 'UserRepoToPerm', cascade='all',
1623 1625 order_by='UserRepoToPerm.repo_to_perm_id')
1624 1626 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1625 1627 stats = relationship('Statistics', cascade='all', uselist=False)
1626 1628
1627 1629 followers = relationship(
1628 1630 'UserFollowing',
1629 1631 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1630 1632 cascade='all')
1631 1633 extra_fields = relationship(
1632 1634 'RepositoryField', cascade="all, delete, delete-orphan")
1633 1635 logs = relationship('UserLog')
1634 1636 comments = relationship(
1635 1637 'ChangesetComment', cascade="all, delete, delete-orphan")
1636 1638 pull_requests_source = relationship(
1637 1639 'PullRequest',
1638 1640 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1639 1641 cascade="all, delete, delete-orphan")
1640 1642 pull_requests_target = relationship(
1641 1643 'PullRequest',
1642 1644 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1643 1645 cascade="all, delete, delete-orphan")
1644 1646 ui = relationship('RepoRhodeCodeUi', cascade="all")
1645 1647 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1646 1648 integrations = relationship('Integration',
1647 1649 cascade="all, delete, delete-orphan")
1648 1650
1649 1651 scoped_tokens = relationship('UserApiKeys', cascade="all")
1650 1652
1651 1653 def __unicode__(self):
1652 1654 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1653 1655 safe_unicode(self.repo_name))
1654 1656
1655 1657 @hybrid_property
1656 1658 def description_safe(self):
1657 1659 from rhodecode.lib import helpers as h
1658 1660 return h.escape(self.description)
1659 1661
1660 1662 @hybrid_property
1661 1663 def landing_rev(self):
1662 1664 # always should return [rev_type, rev]
1663 1665 if self._landing_revision:
1664 1666 _rev_info = self._landing_revision.split(':')
1665 1667 if len(_rev_info) < 2:
1666 1668 _rev_info.insert(0, 'rev')
1667 1669 return [_rev_info[0], _rev_info[1]]
1668 1670 return [None, None]
1669 1671
1670 1672 @landing_rev.setter
1671 1673 def landing_rev(self, val):
1672 1674 if ':' not in val:
1673 1675 raise ValueError('value must be delimited with `:` and consist '
1674 1676 'of <rev_type>:<rev>, got %s instead' % val)
1675 1677 self._landing_revision = val
1676 1678
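# Illustrative sketch (not part of the diff): the landing revision is stored as a
# single '<rev_type>:<rev>' string and exposed as a two-element list, e.g.
#   >>> repo.landing_rev = 'branch:default'
#   >>> repo.landing_rev
#   ['branch', 'default']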
1677 1679 @hybrid_property
1678 1680 def locked(self):
1679 1681 if self._locked:
1680 1682 user_id, timelocked, reason = self._locked.split(':')
1681 1683 lock_values = int(user_id), timelocked, reason
1682 1684 else:
1683 1685 lock_values = [None, None, None]
1684 1686 return lock_values
1685 1687
1686 1688 @locked.setter
1687 1689 def locked(self, val):
1688 1690 if val and isinstance(val, (list, tuple)):
1689 1691 self._locked = ':'.join(map(str, val))
1690 1692 else:
1691 1693 self._locked = None
1692 1694
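# Illustrative sketch (not part of the diff): the lock is persisted as a
# 'user_id:timestamp:reason' string; assigning a 3-tuple sets it, None clears it:
#   >>> repo.locked = [2, time.time(), Repository.LOCK_API]
#   >>> repo.locked
#   (2, '<timestamp string>', 'lock_api')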
1693 1695 @hybrid_property
1694 1696 def changeset_cache(self):
1695 1697 from rhodecode.lib.vcs.backends.base import EmptyCommit
1696 1698 dummy = EmptyCommit().__json__()
1697 1699 if not self._changeset_cache:
1698 1700 return dummy
1699 1701 try:
1700 1702 return json.loads(self._changeset_cache)
1701 1703 except TypeError:
1702 1704 return dummy
1703 1705 except Exception:
1704 1706 log.error(traceback.format_exc())
1705 1707 return dummy
1706 1708
1707 1709 @changeset_cache.setter
1708 1710 def changeset_cache(self, val):
1709 1711 try:
1710 1712 self._changeset_cache = json.dumps(val)
1711 1713 except Exception:
1712 1714 log.error(traceback.format_exc())
1713 1715
1714 1716 @hybrid_property
1715 1717 def repo_name(self):
1716 1718 return self._repo_name
1717 1719
1718 1720 @repo_name.setter
1719 1721 def repo_name(self, value):
1720 1722 self._repo_name = value
1721 1723 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1722 1724
1723 1725 @classmethod
1724 1726 def normalize_repo_name(cls, repo_name):
1725 1727 """
1726 1728 Normalizes an OS-specific repo_name to the format stored internally in the
1727 1729 database, using URL_SEP
1728 1730
1729 1731 :param cls:
1730 1732 :param repo_name:
1731 1733 """
1732 1734 return cls.NAME_SEP.join(repo_name.split(os.sep))
1733 1735
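# Illustrative sketch (not part of the diff), assuming URL_SEP is '/': on Windows
# (where os.sep == '\\') a filesystem-style name is normalized to the internal form:
#   >>> Repository.normalize_repo_name('group\\my-repo')
#   'group/my-repo'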
1734 1736 @classmethod
1735 1737 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1736 1738 session = Session()
1737 1739 q = session.query(cls).filter(cls.repo_name == repo_name)
1738 1740
1739 1741 if cache:
1740 1742 if identity_cache:
1741 1743 val = cls.identity_cache(session, 'repo_name', repo_name)
1742 1744 if val:
1743 1745 return val
1744 1746 else:
1745 1747 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1746 1748 q = q.options(
1747 1749 FromCache("sql_cache_short", cache_key))
1748 1750
1749 1751 return q.scalar()
1750 1752
1751 1753 @classmethod
1752 1754 def get_by_id_or_repo_name(cls, repoid):
1753 1755 if isinstance(repoid, (int, long)):
1754 1756 try:
1755 1757 repo = cls.get(repoid)
1756 1758 except ValueError:
1757 1759 repo = None
1758 1760 else:
1759 1761 repo = cls.get_by_repo_name(repoid)
1760 1762 return repo
1761 1763
1762 1764 @classmethod
1763 1765 def get_by_full_path(cls, repo_full_path):
1764 1766 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1765 1767 repo_name = cls.normalize_repo_name(repo_name)
1766 1768 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1767 1769
1768 1770 @classmethod
1769 1771 def get_repo_forks(cls, repo_id):
1770 1772 return cls.query().filter(Repository.fork_id == repo_id)
1771 1773
1772 1774 @classmethod
1773 1775 def base_path(cls):
1774 1776 """
1775 1777 Returns base path where all repos are stored
1776 1778
1777 1779 :param cls:
1778 1780 """
1779 1781 q = Session().query(RhodeCodeUi)\
1780 1782 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1781 1783 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1782 1784 return q.one().ui_value
1783 1785
1784 1786 @classmethod
1785 1787 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1786 case_insensitive=True):
1788 case_insensitive=True, archived=False):
1787 1789 q = Repository.query()
1788 1790
1791 if not archived:
1792 q = q.filter(Repository.archived.isnot(true()))
1793
1789 1794 if not isinstance(user_id, Optional):
1790 1795 q = q.filter(Repository.user_id == user_id)
1791 1796
1792 1797 if not isinstance(group_id, Optional):
1793 1798 q = q.filter(Repository.group_id == group_id)
1794 1799
1795 1800 if case_insensitive:
1796 1801 q = q.order_by(func.lower(Repository.repo_name))
1797 1802 else:
1798 1803 q = q.order_by(Repository.repo_name)
1804
1799 1805 return q.all()
1800 1806
1801 1807 @property
1802 1808 def forks(self):
1803 1809 """
1804 1810 Return forks of this repo
1805 1811 """
1806 1812 return Repository.get_repo_forks(self.repo_id)
1807 1813
1808 1814 @property
1809 1815 def parent(self):
1810 1816 """
1811 1817 Returns fork parent
1812 1818 """
1813 1819 return self.fork
1814 1820
1815 1821 @property
1816 1822 def just_name(self):
1817 1823 return self.repo_name.split(self.NAME_SEP)[-1]
1818 1824
1819 1825 @property
1820 1826 def groups_with_parents(self):
1821 1827 groups = []
1822 1828 if self.group is None:
1823 1829 return groups
1824 1830
1825 1831 cur_gr = self.group
1826 1832 groups.insert(0, cur_gr)
1827 1833 while 1:
1828 1834 gr = getattr(cur_gr, 'parent_group', None)
1829 1835 cur_gr = cur_gr.parent_group
1830 1836 if gr is None:
1831 1837 break
1832 1838 groups.insert(0, gr)
1833 1839
1834 1840 return groups
1835 1841
1836 1842 @property
1837 1843 def groups_and_repo(self):
1838 1844 return self.groups_with_parents, self
1839 1845
1840 1846 @LazyProperty
1841 1847 def repo_path(self):
1842 1848 """
1843 1849 Returns the full base path for this repository, i.e. where it actually
1844 1850 exists on the filesystem
1845 1851 """
1846 1852 q = Session().query(RhodeCodeUi).filter(
1847 1853 RhodeCodeUi.ui_key == self.NAME_SEP)
1848 1854 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1849 1855 return q.one().ui_value
1850 1856
1851 1857 @property
1852 1858 def repo_full_path(self):
1853 1859 p = [self.repo_path]
1854 1860 # we need to split the name by / since this is how we store the
1855 1861 # names in the database, but that eventually needs to be converted
1856 1862 # into a valid system path
1857 1863 p += self.repo_name.split(self.NAME_SEP)
1858 1864 return os.path.join(*map(safe_unicode, p))
1859 1865
1860 1866 @property
1861 1867 def cache_keys(self):
1862 1868 """
1863 1869 Returns associated cache keys for that repo
1864 1870 """
1865 1871 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1866 1872 repo_id=self.repo_id)
1867 1873 return CacheKey.query()\
1868 1874 .filter(CacheKey.cache_args == invalidation_namespace)\
1869 1875 .order_by(CacheKey.cache_key)\
1870 1876 .all()
1871 1877
1872 1878 @property
1873 1879 def cached_diffs_relative_dir(self):
1874 1880 """
1875 1881 Return the path of cached diffs relative to the repository store,
1876 1882 used for safe display to users who shouldn't know the absolute store
1877 1883 path
1878 1884 """
1879 1885 return os.path.join(
1880 1886 os.path.dirname(self.repo_name),
1881 1887 self.cached_diffs_dir.split(os.path.sep)[-1])
1882 1888
1883 1889 @property
1884 1890 def cached_diffs_dir(self):
1885 1891 path = self.repo_full_path
1886 1892 return os.path.join(
1887 1893 os.path.dirname(path),
1888 1894 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1889 1895
1890 1896 def cached_diffs(self):
1891 1897 diff_cache_dir = self.cached_diffs_dir
1892 1898 if os.path.isdir(diff_cache_dir):
1893 1899 return os.listdir(diff_cache_dir)
1894 1900 return []
1895 1901
1896 1902 def shadow_repos(self):
1897 1903 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1898 1904 return [
1899 1905 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1900 1906 if x.startswith(shadow_repos_pattern)]
1901 1907
1902 1908 def get_new_name(self, repo_name):
1903 1909 """
1904 1910 returns new full repository name based on assigned group and new repo name
1905 1911 
1906 1912 :param repo_name:
1907 1913 """
1908 1914 path_prefix = self.group.full_path_splitted if self.group else []
1909 1915 return self.NAME_SEP.join(path_prefix + [repo_name])
1910 1916
1911 1917 @property
1912 1918 def _config(self):
1913 1919 """
1914 1920 Returns db based config object.
1915 1921 """
1916 1922 from rhodecode.lib.utils import make_db_config
1917 1923 return make_db_config(clear_session=False, repo=self)
1918 1924
1919 1925 def permissions(self, with_admins=True, with_owner=True):
1920 1926 """
1921 1927 Permissions for repositories
1922 1928 """
1923 1929 _admin_perm = 'repository.admin'
1924 1930
1925 1931 owner_row = []
1926 1932 if with_owner:
1927 1933 usr = AttributeDict(self.user.get_dict())
1928 1934 usr.owner_row = True
1929 1935 usr.permission = _admin_perm
1930 1936 usr.permission_id = None
1931 1937 owner_row.append(usr)
1932 1938
1933 1939 super_admin_ids = []
1934 1940 super_admin_rows = []
1935 1941 if with_admins:
1936 1942 for usr in User.get_all_super_admins():
1937 1943 super_admin_ids.append(usr.user_id)
1938 1944 # if this admin is also owner, don't double the record
1939 1945 if usr.user_id == owner_row[0].user_id:
1940 1946 owner_row[0].admin_row = True
1941 1947 else:
1942 1948 usr = AttributeDict(usr.get_dict())
1943 1949 usr.admin_row = True
1944 1950 usr.permission = _admin_perm
1945 1951 usr.permission_id = None
1946 1952 super_admin_rows.append(usr)
1947 1953
1948 1954 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1949 1955 q = q.options(joinedload(UserRepoToPerm.repository),
1950 1956 joinedload(UserRepoToPerm.user),
1951 1957 joinedload(UserRepoToPerm.permission),)
1952 1958
1953 1959 # get owners and admins and permissions. We do a trick of re-writing
1954 1960 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1955 1961 # holds a global reference, so changing one object propagates to all
1956 1962 # others. This means if an admin is also an owner, an admin_row change
1957 1963 # would propagate to both objects
1958 1964 perm_rows = []
1959 1965 for _usr in q.all():
1960 1966 usr = AttributeDict(_usr.user.get_dict())
1961 1967 # if this user is also owner/admin, mark as duplicate record
1962 1968 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1963 1969 usr.duplicate_perm = True
1964 1970 # also check if this permission is maybe used by branch_permissions
1965 1971 if _usr.branch_perm_entry:
1966 1972 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
1967 1973
1968 1974 usr.permission = _usr.permission.permission_name
1969 1975 usr.permission_id = _usr.repo_to_perm_id
1970 1976 perm_rows.append(usr)
1971 1977
1972 1978 # filter the perm rows by 'default' first and then sort them by
1973 1979 # admin,write,read,none permissions sorted again alphabetically in
1974 1980 # each group
1975 1981 perm_rows = sorted(perm_rows, key=display_user_sort)
1976 1982
1977 1983 return super_admin_rows + owner_row + perm_rows
1978 1984
1979 1985 def permission_user_groups(self):
1980 1986 q = UserGroupRepoToPerm.query().filter(
1981 1987 UserGroupRepoToPerm.repository == self)
1982 1988 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1983 1989 joinedload(UserGroupRepoToPerm.users_group),
1984 1990 joinedload(UserGroupRepoToPerm.permission),)
1985 1991
1986 1992 perm_rows = []
1987 1993 for _user_group in q.all():
1988 1994 usr = AttributeDict(_user_group.users_group.get_dict())
1989 1995 usr.permission = _user_group.permission.permission_name
1990 1996 perm_rows.append(usr)
1991 1997
1992 1998 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1993 1999 return perm_rows
1994 2000
1995 2001 def get_api_data(self, include_secrets=False):
1996 2002 """
1997 2003 Common function for generating repo api data
1998 2004
1999 2005 :param include_secrets: See :meth:`User.get_api_data`.
2000 2006
2001 2007 """
2002 2008 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2003 2009 # move this methods on models level.
2004 2010 from rhodecode.model.settings import SettingsModel
2005 2011 from rhodecode.model.repo import RepoModel
2006 2012
2007 2013 repo = self
2008 2014 _user_id, _time, _reason = self.locked
2009 2015
2010 2016 data = {
2011 2017 'repo_id': repo.repo_id,
2012 2018 'repo_name': repo.repo_name,
2013 2019 'repo_type': repo.repo_type,
2014 2020 'clone_uri': repo.clone_uri or '',
2015 2021 'push_uri': repo.push_uri or '',
2016 2022 'url': RepoModel().get_url(self),
2017 2023 'private': repo.private,
2018 2024 'created_on': repo.created_on,
2019 2025 'description': repo.description_safe,
2020 2026 'landing_rev': repo.landing_rev,
2021 2027 'owner': repo.user.username,
2022 2028 'fork_of': repo.fork.repo_name if repo.fork else None,
2023 2029 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2024 2030 'enable_statistics': repo.enable_statistics,
2025 2031 'enable_locking': repo.enable_locking,
2026 2032 'enable_downloads': repo.enable_downloads,
2027 2033 'last_changeset': repo.changeset_cache,
2028 2034 'locked_by': User.get(_user_id).get_api_data(
2029 2035 include_secrets=include_secrets) if _user_id else None,
2030 2036 'locked_date': time_to_datetime(_time) if _time else None,
2031 2037 'lock_reason': _reason if _reason else None,
2032 2038 }
2033 2039
2034 2040 # TODO: mikhail: should be per-repo settings here
2035 2041 rc_config = SettingsModel().get_all_settings()
2036 2042 repository_fields = str2bool(
2037 2043 rc_config.get('rhodecode_repository_fields'))
2038 2044 if repository_fields:
2039 2045 for f in self.extra_fields:
2040 2046 data[f.field_key_prefixed] = f.field_value
2041 2047
2042 2048 return data
2043 2049
2044 2050 @classmethod
2045 2051 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2046 2052 if not lock_time:
2047 2053 lock_time = time.time()
2048 2054 if not lock_reason:
2049 2055 lock_reason = cls.LOCK_AUTOMATIC
2050 2056 repo.locked = [user_id, lock_time, lock_reason]
2051 2057 Session().add(repo)
2052 2058 Session().commit()
2053 2059
2054 2060 @classmethod
2055 2061 def unlock(cls, repo):
2056 2062 repo.locked = None
2057 2063 Session().add(repo)
2058 2064 Session().commit()
2059 2065
2060 2066 @classmethod
2061 2067 def getlock(cls, repo):
2062 2068 return repo.locked
2063 2069
2064 2070 def is_user_lock(self, user_id):
2065 2071 if self.lock[0]:
2066 2072 lock_user_id = safe_int(self.lock[0])
2067 2073 user_id = safe_int(user_id)
2068 2074 # both are ints, and they are equal
2069 2075 return all([lock_user_id, user_id]) and lock_user_id == user_id
2070 2076
2071 2077 return False
2072 2078
2073 2079 def get_locking_state(self, action, user_id, only_when_enabled=True):
2074 2080 """
2075 2081 Checks locking on this repository. If locking is enabled and a lock is
2076 2082 present, returns a tuple of (make_lock, locked, locked_by).
2077 2083 make_lock can have 3 states: None (do nothing), True (make lock),
2078 2084 False (release lock). This value is later propagated to hooks, which
2079 2085 do the locking. Think of this as signals passed to hooks on what to do.
2080 2086
2081 2087 """
2082 2088 # TODO: johbo: This is part of the business logic and should be moved
2083 2089 # into the RepositoryModel.
2084 2090
2085 2091 if action not in ('push', 'pull'):
2086 2092 raise ValueError("Invalid action value: %s" % repr(action))
2087 2093
2088 2094 # defines if locked error should be thrown to user
2089 2095 currently_locked = False
2090 2096 # defines if new lock should be made, tri-state
2091 2097 make_lock = None
2092 2098 repo = self
2093 2099 user = User.get(user_id)
2094 2100
2095 2101 lock_info = repo.locked
2096 2102
2097 2103 if repo and (repo.enable_locking or not only_when_enabled):
2098 2104 if action == 'push':
2099 2105 # check if it's already locked; if it is, compare users
2100 2106 locked_by_user_id = lock_info[0]
2101 2107 if user.user_id == locked_by_user_id:
2102 2108 log.debug(
2103 2109 'Got `push` action from user %s, now unlocking', user)
2104 2110 # unlock if we have push from user who locked
2105 2111 make_lock = False
2106 2112 else:
2107 2113 # we're not the same user who locked, ban with
2108 2114 # code defined in settings (default is 423 HTTP Locked) !
2109 2115 log.debug('Repo %s is currently locked by %s', repo, user)
2110 2116 currently_locked = True
2111 2117 elif action == 'pull':
2112 2118 # [0] user [1] date
2113 2119 if lock_info[0] and lock_info[1]:
2114 2120 log.debug('Repo %s is currently locked by %s', repo, user)
2115 2121 currently_locked = True
2116 2122 else:
2117 2123 log.debug('Setting lock on repo %s by %s', repo, user)
2118 2124 make_lock = True
2119 2125
2120 2126 else:
2121 2127 log.debug('Repository %s does not have locking enabled', repo)
2122 2128
2123 2129 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2124 2130 make_lock, currently_locked, lock_info)
2125 2131
2126 2132 from rhodecode.lib.auth import HasRepoPermissionAny
2127 2133 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2128 2134 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2129 2135 # if we don't have at least write permission we cannot make a lock
2130 2136 log.debug('lock state reset back to FALSE due to lack '
2131 2137 'of at least write permission')
2132 2138 make_lock = False
2133 2139
2134 2140 return make_lock, currently_locked, lock_info
2135 2141
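# Illustrative sketch (not part of the diff): callers act on the tri-state
# `make_lock` value returned above, e.g.
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
#   # make_lock is True  -> hooks should set a new lock
#   # make_lock is False -> hooks should release the existing lock
#   # make_lock is None  -> hooks leave the lock state untouched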
2136 2142 @property
2137 2143 def last_db_change(self):
2138 2144 return self.updated_on
2139 2145
2140 2146 @property
2141 2147 def clone_uri_hidden(self):
2142 2148 clone_uri = self.clone_uri
2143 2149 if clone_uri:
2144 2150 import urlobject
2145 2151 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2146 2152 if url_obj.password:
2147 2153 clone_uri = url_obj.with_password('*****')
2148 2154 return clone_uri
2149 2155
2150 2156 @property
2151 2157 def push_uri_hidden(self):
2152 2158 push_uri = self.push_uri
2153 2159 if push_uri:
2154 2160 import urlobject
2155 2161 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2156 2162 if url_obj.password:
2157 2163 push_uri = url_obj.with_password('*****')
2158 2164 return push_uri
2159 2165
2160 2166 def clone_url(self, **override):
2161 2167 from rhodecode.model.settings import SettingsModel
2162 2168
2163 2169 uri_tmpl = None
2164 2170 if 'with_id' in override:
2165 2171 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2166 2172 del override['with_id']
2167 2173
2168 2174 if 'uri_tmpl' in override:
2169 2175 uri_tmpl = override['uri_tmpl']
2170 2176 del override['uri_tmpl']
2171 2177
2172 2178 ssh = False
2173 2179 if 'ssh' in override:
2174 2180 ssh = True
2175 2181 del override['ssh']
2176 2182
2177 2183 # we didn't override our tmpl from **overrides
2178 2184 if not uri_tmpl:
2179 2185 rc_config = SettingsModel().get_all_settings(cache=True)
2180 2186 if ssh:
2181 2187 uri_tmpl = rc_config.get(
2182 2188 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2183 2189 else:
2184 2190 uri_tmpl = rc_config.get(
2185 2191 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2186 2192
2187 2193 request = get_current_request()
2188 2194 return get_clone_url(request=request,
2189 2195 uri_tmpl=uri_tmpl,
2190 2196 repo_name=self.repo_name,
2191 2197 repo_id=self.repo_id, **override)
2192 2198
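# Illustrative sketch (not part of the diff): clone_url() picks its URI template
# from the overrides or from the global settings, e.g.
#   >>> repo.clone_url()              # rhodecode_clone_uri_tmpl, or DEFAULT_CLONE_URI
#   >>> repo.clone_url(with_id=True)  # DEFAULT_CLONE_URI_ID ('.../_{repoid}')
#   >>> repo.clone_url(ssh=True)      # rhodecode_clone_uri_ssh_tmpl, or DEFAULT_CLONE_URI_SSH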
2193 2199 def set_state(self, state):
2194 2200 self.repo_state = state
2195 2201 Session().add(self)
2196 2202 #==========================================================================
2197 2203 # SCM PROPERTIES
2198 2204 #==========================================================================
2199 2205
2200 2206 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2201 2207 return get_commit_safe(
2202 2208 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2203 2209
2204 2210 def get_changeset(self, rev=None, pre_load=None):
2205 2211 warnings.warn("Use get_commit", DeprecationWarning)
2206 2212 commit_id = None
2207 2213 commit_idx = None
2208 2214 if isinstance(rev, basestring):
2209 2215 commit_id = rev
2210 2216 else:
2211 2217 commit_idx = rev
2212 2218 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2213 2219 pre_load=pre_load)
2214 2220
2215 2221 def get_landing_commit(self):
2216 2222 """
2217 2223 Returns landing commit, or if that doesn't exist returns the tip
2218 2224 """
2219 2225 _rev_type, _rev = self.landing_rev
2220 2226 commit = self.get_commit(_rev)
2221 2227 if isinstance(commit, EmptyCommit):
2222 2228 return self.get_commit()
2223 2229 return commit
2224 2230
2225 2231 def update_commit_cache(self, cs_cache=None, config=None):
2226 2232 """
2227 2233 Update cache of last changeset for repository, keys should be::
2228 2234
2229 2235 short_id
2230 2236 raw_id
2231 2237 revision
2232 2238 parents
2233 2239 message
2234 2240 date
2235 2241 author
2236 2242
2237 2243 :param cs_cache:
2238 2244 """
2239 2245 from rhodecode.lib.vcs.backends.base import BaseChangeset
2240 2246 if cs_cache is None:
2241 2247 # use no-cache version here
2242 2248 scm_repo = self.scm_instance(cache=False, config=config)
2243 2249
2244 2250 empty = scm_repo.is_empty()
2245 2251 if not empty:
2246 2252 cs_cache = scm_repo.get_commit(
2247 2253 pre_load=["author", "date", "message", "parents"])
2248 2254 else:
2249 2255 cs_cache = EmptyCommit()
2250 2256
2251 2257 if isinstance(cs_cache, BaseChangeset):
2252 2258 cs_cache = cs_cache.__json__()
2253 2259
2254 2260 def is_outdated(new_cs_cache):
2255 2261 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2256 2262 new_cs_cache['revision'] != self.changeset_cache['revision']):
2257 2263 return True
2258 2264 return False
2259 2265
2260 2266 # check if we have maybe already latest cached revision
2261 2267 if is_outdated(cs_cache) or not self.changeset_cache:
2262 2268 _default = datetime.datetime.utcnow()
2263 2269 last_change = cs_cache.get('date') or _default
2264 2270 if self.updated_on and self.updated_on > last_change:
2265 2271 # we check if last update is newer than the new value
2266 2272 # if yes, we use the current timestamp instead. Imagine you get
2267 2273 # old commit pushed 1y ago, we'd set last update to 1y ago.
2268 2274 last_change = _default
2269 2275 log.debug('updated repo %s with new cs cache %s',
2270 2276 self.repo_name, cs_cache)
2271 2277 self.updated_on = last_change
2272 2278 self.changeset_cache = cs_cache
2273 2279 Session().add(self)
2274 2280 Session().commit()
2275 2281 else:
2276 2282 log.debug('Skipping update_commit_cache for repo:`%s` '
2277 2283 'commit cache already has latest changes', self.repo_name)
2278 2284
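# Illustrative sketch (not part of the diff): the persisted changeset cache is a
# JSON dict with (at least) the keys listed in the docstring above, e.g.
#   {'short_id': ..., 'raw_id': ..., 'revision': ..., 'parents': ...,
#    'message': ..., 'date': ..., 'author': ...}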
2279 2285 @property
2280 2286 def tip(self):
2281 2287 return self.get_commit('tip')
2282 2288
2283 2289 @property
2284 2290 def author(self):
2285 2291 return self.tip.author
2286 2292
2287 2293 @property
2288 2294 def last_change(self):
2289 2295 return self.scm_instance().last_change
2290 2296
2291 2297 def get_comments(self, revisions=None):
2292 2298 """
2293 2299 Returns comments for this repository grouped by revisions
2294 2300
2295 2301 :param revisions: filter query by revisions only
2296 2302 """
2297 2303 cmts = ChangesetComment.query()\
2298 2304 .filter(ChangesetComment.repo == self)
2299 2305 if revisions:
2300 2306 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2301 2307 grouped = collections.defaultdict(list)
2302 2308 for cmt in cmts.all():
2303 2309 grouped[cmt.revision].append(cmt)
2304 2310 return grouped
2305 2311
2306 2312 def statuses(self, revisions=None):
2307 2313 """
2308 2314 Returns statuses for this repository
2309 2315
2310 2316 :param revisions: list of revisions to get statuses for
2311 2317 """
2312 2318 statuses = ChangesetStatus.query()\
2313 2319 .filter(ChangesetStatus.repo == self)\
2314 2320 .filter(ChangesetStatus.version == 0)
2315 2321
2316 2322 if revisions:
2317 2323 # Try doing the filtering in chunks to avoid hitting limits
2318 2324 size = 500
2319 2325 status_results = []
2320 2326 for chunk in xrange(0, len(revisions), size):
2321 2327 status_results += statuses.filter(
2322 2328 ChangesetStatus.revision.in_(
2323 2329 revisions[chunk: chunk+size])
2324 2330 ).all()
2325 2331 else:
2326 2332 status_results = statuses.all()
2327 2333
2328 2334 grouped = {}
2329 2335
2330 2336 # maybe we have an open new pull request without a status?
2331 2337 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2332 2338 status_lbl = ChangesetStatus.get_status_lbl(stat)
2333 2339 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2334 2340 for rev in pr.revisions:
2335 2341 pr_id = pr.pull_request_id
2336 2342 pr_repo = pr.target_repo.repo_name
2337 2343 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2338 2344
2339 2345 for stat in status_results:
2340 2346 pr_id = pr_repo = None
2341 2347 if stat.pull_request:
2342 2348 pr_id = stat.pull_request.pull_request_id
2343 2349 pr_repo = stat.pull_request.target_repo.repo_name
2344 2350 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2345 2351 pr_id, pr_repo]
2346 2352 return grouped
2347 2353
2348 2354 # ==========================================================================
2349 2355 # SCM CACHE INSTANCE
2350 2356 # ==========================================================================
2351 2357
2352 2358 def scm_instance(self, **kwargs):
2353 2359 import rhodecode
2354 2360
2355 2361 # Passing a config will not hit the cache currently only used
2356 2362 # for repo2dbmapper
2357 2363 config = kwargs.pop('config', None)
2358 2364 cache = kwargs.pop('cache', None)
2359 2365 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2360 2366 # if cache is NOT defined use the global default, otherwise we have full
2361 2367 # control over cache behaviour
2362 2368 if cache is None and full_cache and not config:
2363 2369 return self._get_instance_cached()
2364 2370 return self._get_instance(cache=bool(cache), config=config)
2365 2371
2366 2372 def _get_instance_cached(self):
2367 2373 from rhodecode.lib import rc_cache
2368 2374
2369 2375 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2370 2376 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2371 2377 repo_id=self.repo_id)
2372 2378 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2373 2379
2374 2380 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2375 2381 def get_instance_cached(repo_id, context_id):
2376 2382 return self._get_instance()
2377 2383
2378 2384 # we must use thread scoped cache here,
2379 2385 # because each gevent thread needs its own, non-shared connection and cache
2380 2386 # we also alter `args` so the cache key is individual for every green thread.
2381 2387 inv_context_manager = rc_cache.InvalidationContext(
2382 2388 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2383 2389 thread_scoped=True)
2384 2390 with inv_context_manager as invalidation_context:
2385 2391 args = (self.repo_id, inv_context_manager.cache_key)
2386 2392 # re-compute and store cache if we get invalidate signal
2387 2393 if invalidation_context.should_invalidate():
2388 2394 instance = get_instance_cached.refresh(*args)
2389 2395 else:
2390 2396 instance = get_instance_cached(*args)
2391 2397
2392 2398 log.debug(
2393 2399 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2394 2400 return instance
2395 2401
2396 2402 def _get_instance(self, cache=True, config=None):
2397 2403 config = config or self._config
2398 2404 custom_wire = {
2399 2405 'cache': cache # controls the vcs.remote cache
2400 2406 }
2401 2407 repo = get_vcs_instance(
2402 2408 repo_path=safe_str(self.repo_full_path),
2403 2409 config=config,
2404 2410 with_wire=custom_wire,
2405 2411 create=False,
2406 2412 _vcs_alias=self.repo_type)
2407 2413
2408 2414 return repo
2409 2415
2410 2416 def __json__(self):
2411 2417 return {'landing_rev': self.landing_rev}
2412 2418
2413 2419 def get_dict(self):
2414 2420
2415 2421 # Since we transformed `repo_name` to a hybrid property, we need to
2416 2422 # keep compatibility with the code which uses `repo_name` field.
2417 2423
2418 2424 result = super(Repository, self).get_dict()
2419 2425 result['repo_name'] = result.pop('_repo_name', None)
2420 2426 return result
2421 2427
2422 2428
2423 2429 class RepoGroup(Base, BaseModel):
2424 2430 __tablename__ = 'groups'
2425 2431 __table_args__ = (
2426 2432 UniqueConstraint('group_name', 'group_parent_id'),
2427 2433 CheckConstraint('group_id != group_parent_id'),
2428 2434 base_table_args,
2429 2435 )
2430 2436 __mapper_args__ = {'order_by': 'group_name'}
2431 2437
2432 2438 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2433 2439
2434 2440 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2435 2441 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2436 2442 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2437 2443 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2438 2444 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2439 2445 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2440 2446 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2441 2447 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2442 2448 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2443 2449
2444 2450 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2445 2451 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2446 2452 parent_group = relationship('RepoGroup', remote_side=group_id)
2447 2453 user = relationship('User')
2448 2454 integrations = relationship('Integration',
2449 2455 cascade="all, delete, delete-orphan")
2450 2456
2451 2457 def __init__(self, group_name='', parent_group=None):
2452 2458 self.group_name = group_name
2453 2459 self.parent_group = parent_group
2454 2460
2455 2461 def __unicode__(self):
2456 2462 return u"<%s('id:%s:%s')>" % (
2457 2463 self.__class__.__name__, self.group_id, self.group_name)
2458 2464
2459 2465 @hybrid_property
2460 2466 def description_safe(self):
2461 2467 from rhodecode.lib import helpers as h
2462 2468 return h.escape(self.group_description)
2463 2469
2464 2470 @classmethod
2465 2471 def _generate_choice(cls, repo_group):
2466 2472 from webhelpers.html import literal as _literal
2467 2473 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2468 2474 return repo_group.group_id, _name(repo_group.full_path_splitted)
2469 2475
2470 2476 @classmethod
2471 2477 def groups_choices(cls, groups=None, show_empty_group=True):
2472 2478 if not groups:
2473 2479 groups = cls.query().all()
2474 2480
2475 2481 repo_groups = []
2476 2482 if show_empty_group:
2477 2483 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2478 2484
2479 2485 repo_groups.extend([cls._generate_choice(x) for x in groups])
2480 2486
2481 2487 repo_groups = sorted(
2482 2488 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2483 2489 return repo_groups
2484 2490
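# Illustrative sketch (not part of the model): the shape of what
# groups_choices() yields. The group names and ids below are hypothetical.
#
#   RepoGroup.groups_choices()
#   # -> [(-1, u'-- No parent --'),
#   #     (4, u'projects'),
#   #     (7, u'projects/backend')]
#
# Each tuple is (group_id, full path joined with CHOICES_SEPARATOR), sorted
# by the first path segment, ready to feed a select2 widget.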
2485 2491 @classmethod
2486 2492 def url_sep(cls):
2487 2493 return URL_SEP
2488 2494
2489 2495 @classmethod
2490 2496 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2491 2497 if case_insensitive:
2492 2498 gr = cls.query().filter(func.lower(cls.group_name)
2493 2499 == func.lower(group_name))
2494 2500 else:
2495 2501 gr = cls.query().filter(cls.group_name == group_name)
2496 2502 if cache:
2497 2503 name_key = _hash_key(group_name)
2498 2504 gr = gr.options(
2499 2505 FromCache("sql_cache_short", "get_group_%s" % name_key))
2500 2506 return gr.scalar()
2501 2507
2502 2508 @classmethod
2503 2509 def get_user_personal_repo_group(cls, user_id):
2504 2510 user = User.get(user_id)
2505 2511 if user.username == User.DEFAULT_USER:
2506 2512 return None
2507 2513
2508 2514 return cls.query()\
2509 2515 .filter(cls.personal == true()) \
2510 2516 .filter(cls.user == user) \
2511 2517 .order_by(cls.group_id.asc()) \
2512 2518 .first()
2513 2519
2514 2520 @classmethod
2515 2521 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2516 2522 case_insensitive=True):
2517 2523 q = RepoGroup.query()
2518 2524
2519 2525 if not isinstance(user_id, Optional):
2520 2526 q = q.filter(RepoGroup.user_id == user_id)
2521 2527
2522 2528 if not isinstance(group_id, Optional):
2523 2529 q = q.filter(RepoGroup.group_parent_id == group_id)
2524 2530
2525 2531 if case_insensitive:
2526 2532 q = q.order_by(func.lower(RepoGroup.group_name))
2527 2533 else:
2528 2534 q = q.order_by(RepoGroup.group_name)
2529 2535 return q.all()
2530 2536
2531 2537 @property
2532 2538 def parents(self):
2533 2539 parents_recursion_limit = 10
2534 2540 groups = []
2535 2541 if self.parent_group is None:
2536 2542 return groups
2537 2543 cur_gr = self.parent_group
2538 2544 groups.insert(0, cur_gr)
2539 2545 cnt = 0
2540 2546 while 1:
2541 2547 cnt += 1
2542 2548 gr = getattr(cur_gr, 'parent_group', None)
2543 2549 cur_gr = cur_gr.parent_group
2544 2550 if gr is None:
2545 2551 break
2546 2552 if cnt == parents_recursion_limit:
2547 2553 # this will prevent accidental infinite loops
2548 2554 log.error('more than %s parents found for group %s, stopping '
2549 2555 'recursive parent fetching', parents_recursion_limit, self)
2550 2556 break
2551 2557
2552 2558 groups.insert(0, gr)
2553 2559 return groups
2554 2560
2555 2561 @property
2556 2562 def last_db_change(self):
2557 2563 return self.updated_on
2558 2564
2559 2565 @property
2560 2566 def children(self):
2561 2567 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2562 2568
2563 2569 @property
2564 2570 def name(self):
2565 2571 return self.group_name.split(RepoGroup.url_sep())[-1]
2566 2572
2567 2573 @property
2568 2574 def full_path(self):
2569 2575 return self.group_name
2570 2576
2571 2577 @property
2572 2578 def full_path_splitted(self):
2573 2579 return self.group_name.split(RepoGroup.url_sep())
2574 2580
2575 2581 @property
2576 2582 def repositories(self):
2577 2583 return Repository.query()\
2578 2584 .filter(Repository.group == self)\
2579 2585 .order_by(Repository.repo_name)
2580 2586
2581 2587 @property
2582 2588 def repositories_recursive_count(self):
2583 2589 cnt = self.repositories.count()
2584 2590
2585 2591 def children_count(group):
2586 2592 cnt = 0
2587 2593 for child in group.children:
2588 2594 cnt += child.repositories.count()
2589 2595 cnt += children_count(child)
2590 2596 return cnt
2591 2597
2592 2598 return cnt + children_count(self)
2593 2599
2594 2600 def _recursive_objects(self, include_repos=True):
2595 2601 all_ = []
2596 2602
2597 2603 def _get_members(root_gr):
2598 2604 if include_repos:
2599 2605 for r in root_gr.repositories:
2600 2606 all_.append(r)
2601 2607 childs = root_gr.children.all()
2602 2608 if childs:
2603 2609 for gr in childs:
2604 2610 all_.append(gr)
2605 2611 _get_members(gr)
2606 2612
2607 2613 _get_members(self)
2608 2614 return [self] + all_
2609 2615
2610 2616 def recursive_groups_and_repos(self):
2611 2617 """
2612 2618 Recursively return all groups, together with the repositories in those groups
2613 2619 """
2614 2620 return self._recursive_objects()
2615 2621
2616 2622 def recursive_groups(self):
2617 2623 """
2618 2624 Returns all child groups of this group, including children of children
2619 2625 """
2620 2626 return self._recursive_objects(include_repos=False)
2621 2627
2622 2628 def get_new_name(self, group_name):
2623 2629 """
2624 2630 Returns the new full group name based on the parent group and the new name
2625 2631
2626 2632 :param group_name:
2627 2633 """
2628 2634 path_prefix = (self.parent_group.full_path_splitted if
2629 2635 self.parent_group else [])
2630 2636 return RepoGroup.url_sep().join(path_prefix + [group_name])
2631 2637
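# Illustrative sketch (hypothetical group names): renaming keeps the parent
# path prefix, since get_new_name() joins the parent's path with the new name.
#
#   child = RepoGroup.get_by_group_name('projects/backend')
#   child.get_new_name('services')      # -> 'projects/services'
#
#   top_level = RepoGroup.get_by_group_name('docs')   # no parent group
#   top_level.get_new_name('manuals')   # -> 'manuals'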
2632 2638 def permissions(self, with_admins=True, with_owner=True):
2633 2639 """
2634 2640 Permissions for repository groups
2635 2641 """
2636 2642 _admin_perm = 'group.admin'
2637 2643
2638 2644 owner_row = []
2639 2645 if with_owner:
2640 2646 usr = AttributeDict(self.user.get_dict())
2641 2647 usr.owner_row = True
2642 2648 usr.permission = _admin_perm
2643 2649 owner_row.append(usr)
2644 2650
2645 2651 super_admin_ids = []
2646 2652 super_admin_rows = []
2647 2653 if with_admins:
2648 2654 for usr in User.get_all_super_admins():
2649 2655 super_admin_ids.append(usr.user_id)
2650 2656 # if this admin is also the owner, don't duplicate the record
2651 2657 if usr.user_id == owner_row[0].user_id:
2652 2658 owner_row[0].admin_row = True
2653 2659 else:
2654 2660 usr = AttributeDict(usr.get_dict())
2655 2661 usr.admin_row = True
2656 2662 usr.permission = _admin_perm
2657 2663 super_admin_rows.append(usr)
2658 2664
2659 2665 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2660 2666 q = q.options(joinedload(UserRepoGroupToPerm.group),
2661 2667 joinedload(UserRepoGroupToPerm.user),
2662 2668 joinedload(UserRepoGroupToPerm.permission),)
2663 2669
2664 2670 # get owners, admins and their permissions. We re-write the sqlalchemy
2665 2671 # objects into named-tuple-like dicts because the sqlalchemy session
2666 2672 # holds a global reference, so changing one object would propagate to
2667 2673 # all others. This means that if an admin is also the owner, setting
2668 2674 # admin_row on one record would otherwise change both objects.
2669 2675 perm_rows = []
2670 2676 for _usr in q.all():
2671 2677 usr = AttributeDict(_usr.user.get_dict())
2672 2678 # if this user is also owner/admin, mark as duplicate record
2673 2679 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2674 2680 usr.duplicate_perm = True
2675 2681 usr.permission = _usr.permission.permission_name
2676 2682 perm_rows.append(usr)
2677 2683
2678 2684 # order the perm rows with the 'default' user first, then by
2679 2685 # admin, write, read, none permission, sorted alphabetically
2680 2686 # within each group
2681 2687 perm_rows = sorted(perm_rows, key=display_user_sort)
2682 2688
2683 2689 return super_admin_rows + owner_row + perm_rows
2684 2690
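# Illustrative sketch: consuming the rows returned by permissions(). Each row
# is an AttributeDict of user fields plus a `permission` attribute; owner and
# super-admin rows are flagged with `owner_row` / `admin_row`. `repo_group`
# is a hypothetical RepoGroup instance.
for row in repo_group.permissions(with_admins=True, with_owner=True):
    print(row.username, row.permission,
          getattr(row, 'owner_row', False), getattr(row, 'admin_row', False))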
2685 2691 def permission_user_groups(self):
2686 2692 q = UserGroupRepoGroupToPerm.query().filter(
2687 2693 UserGroupRepoGroupToPerm.group == self)
2688 2694 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2689 2695 joinedload(UserGroupRepoGroupToPerm.users_group),
2690 2696 joinedload(UserGroupRepoGroupToPerm.permission),)
2691 2697
2692 2698 perm_rows = []
2693 2699 for _user_group in q.all():
2694 2700 usr = AttributeDict(_user_group.users_group.get_dict())
2695 2701 usr.permission = _user_group.permission.permission_name
2696 2702 perm_rows.append(usr)
2697 2703
2698 2704 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2699 2705 return perm_rows
2700 2706
2701 2707 def get_api_data(self):
2702 2708 """
2703 2709 Common function for generating api data
2704 2710
2705 2711 """
2706 2712 group = self
2707 2713 data = {
2708 2714 'group_id': group.group_id,
2709 2715 'group_name': group.group_name,
2710 2716 'group_description': group.description_safe,
2711 2717 'parent_group': group.parent_group.group_name if group.parent_group else None,
2712 2718 'repositories': [x.repo_name for x in group.repositories],
2713 2719 'owner': group.user.username,
2714 2720 }
2715 2721 return data
2716 2722
2717 2723
2718 2724 class Permission(Base, BaseModel):
2719 2725 __tablename__ = 'permissions'
2720 2726 __table_args__ = (
2721 2727 Index('p_perm_name_idx', 'permission_name'),
2722 2728 base_table_args,
2723 2729 )
2724 2730
2725 2731 PERMS = [
2726 2732 ('hg.admin', _('RhodeCode Super Administrator')),
2727 2733
2728 2734 ('repository.none', _('Repository no access')),
2729 2735 ('repository.read', _('Repository read access')),
2730 2736 ('repository.write', _('Repository write access')),
2731 2737 ('repository.admin', _('Repository admin access')),
2732 2738
2733 2739 ('group.none', _('Repository group no access')),
2734 2740 ('group.read', _('Repository group read access')),
2735 2741 ('group.write', _('Repository group write access')),
2736 2742 ('group.admin', _('Repository group admin access')),
2737 2743
2738 2744 ('usergroup.none', _('User group no access')),
2739 2745 ('usergroup.read', _('User group read access')),
2740 2746 ('usergroup.write', _('User group write access')),
2741 2747 ('usergroup.admin', _('User group admin access')),
2742 2748
2743 2749 ('branch.none', _('Branch no permissions')),
2744 2750 ('branch.merge', _('Branch access by web merge')),
2745 2751 ('branch.push', _('Branch access by push')),
2746 2752 ('branch.push_force', _('Branch access by push with force')),
2747 2753
2748 2754 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2749 2755 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2750 2756
2751 2757 ('hg.usergroup.create.false', _('User Group creation disabled')),
2752 2758 ('hg.usergroup.create.true', _('User Group creation enabled')),
2753 2759
2754 2760 ('hg.create.none', _('Repository creation disabled')),
2755 2761 ('hg.create.repository', _('Repository creation enabled')),
2756 2762 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2757 2763 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2758 2764
2759 2765 ('hg.fork.none', _('Repository forking disabled')),
2760 2766 ('hg.fork.repository', _('Repository forking enabled')),
2761 2767
2762 2768 ('hg.register.none', _('Registration disabled')),
2763 2769 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2764 2770 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2765 2771
2766 2772 ('hg.password_reset.enabled', _('Password reset enabled')),
2767 2773 ('hg.password_reset.hidden', _('Password reset hidden')),
2768 2774 ('hg.password_reset.disabled', _('Password reset disabled')),
2769 2775
2770 2776 ('hg.extern_activate.manual', _('Manual activation of external account')),
2771 2777 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2772 2778
2773 2779 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2774 2780 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2775 2781 ]
2776 2782
2777 2783 # definition of system default permissions for DEFAULT user, created on
2778 2784 # system setup
2779 2785 DEFAULT_USER_PERMISSIONS = [
2780 2786 # object perms
2781 2787 'repository.read',
2782 2788 'group.read',
2783 2789 'usergroup.read',
2784 2790 # branch; for backward compat we need the same value as before, i.e. force push
2785 2791 'branch.push_force',
2786 2792 # global
2787 2793 'hg.create.repository',
2788 2794 'hg.repogroup.create.false',
2789 2795 'hg.usergroup.create.false',
2790 2796 'hg.create.write_on_repogroup.true',
2791 2797 'hg.fork.repository',
2792 2798 'hg.register.manual_activate',
2793 2799 'hg.password_reset.enabled',
2794 2800 'hg.extern_activate.auto',
2795 2801 'hg.inherit_default_perms.true',
2796 2802 ]
2797 2803
2798 2804 # Weight defines which permissions are more important;
2799 2805 # the higher the number, the more important the permission.
2800 2806 # Used to pick the strongest permission when several apply.
2801 2807 PERM_WEIGHTS = {
2802 2808 'repository.none': 0,
2803 2809 'repository.read': 1,
2804 2810 'repository.write': 3,
2805 2811 'repository.admin': 4,
2806 2812
2807 2813 'group.none': 0,
2808 2814 'group.read': 1,
2809 2815 'group.write': 3,
2810 2816 'group.admin': 4,
2811 2817
2812 2818 'usergroup.none': 0,
2813 2819 'usergroup.read': 1,
2814 2820 'usergroup.write': 3,
2815 2821 'usergroup.admin': 4,
2816 2822
2817 2823 'branch.none': 0,
2818 2824 'branch.merge': 1,
2819 2825 'branch.push': 3,
2820 2826 'branch.push_force': 4,
2821 2827
2822 2828 'hg.repogroup.create.false': 0,
2823 2829 'hg.repogroup.create.true': 1,
2824 2830
2825 2831 'hg.usergroup.create.false': 0,
2826 2832 'hg.usergroup.create.true': 1,
2827 2833
2828 2834 'hg.fork.none': 0,
2829 2835 'hg.fork.repository': 1,
2830 2836 'hg.create.none': 0,
2831 2837 'hg.create.repository': 1
2832 2838 }
2833 2839
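# Illustrative sketch: PERM_WEIGHTS can be used to compare permissions, e.g.
# to pick the strongest one when a user is granted access both directly and
# through a user group.
granted = ['repository.read', 'repository.write']
strongest = max(granted, key=Permission.PERM_WEIGHTS.get)
# strongest == 'repository.write'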
2834 2840 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2835 2841 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2836 2842 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2837 2843
2838 2844 def __unicode__(self):
2839 2845 return u"<%s('%s:%s')>" % (
2840 2846 self.__class__.__name__, self.permission_id, self.permission_name
2841 2847 )
2842 2848
2843 2849 @classmethod
2844 2850 def get_by_key(cls, key):
2845 2851 return cls.query().filter(cls.permission_name == key).scalar()
2846 2852
2847 2853 @classmethod
2848 2854 def get_default_repo_perms(cls, user_id, repo_id=None):
2849 2855 q = Session().query(UserRepoToPerm, Repository, Permission)\
2850 2856 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2851 2857 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2852 2858 .filter(UserRepoToPerm.user_id == user_id)
2853 2859 if repo_id:
2854 2860 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2855 2861 return q.all()
2856 2862
2857 2863 @classmethod
2858 2864 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
2859 2865 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
2860 2866 .join(
2861 2867 Permission,
2862 2868 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
2863 2869 .join(
2864 2870 UserRepoToPerm,
2865 2871 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
2866 2872 .filter(UserRepoToPerm.user_id == user_id)
2867 2873
2868 2874 if repo_id:
2869 2875 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
2870 2876 return q.order_by(UserToRepoBranchPermission.rule_order).all()
2871 2877
2872 2878 @classmethod
2873 2879 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2874 2880 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2875 2881 .join(
2876 2882 Permission,
2877 2883 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2878 2884 .join(
2879 2885 Repository,
2880 2886 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2881 2887 .join(
2882 2888 UserGroup,
2883 2889 UserGroupRepoToPerm.users_group_id ==
2884 2890 UserGroup.users_group_id)\
2885 2891 .join(
2886 2892 UserGroupMember,
2887 2893 UserGroupRepoToPerm.users_group_id ==
2888 2894 UserGroupMember.users_group_id)\
2889 2895 .filter(
2890 2896 UserGroupMember.user_id == user_id,
2891 2897 UserGroup.users_group_active == true())
2892 2898 if repo_id:
2893 2899 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2894 2900 return q.all()
2895 2901
2896 2902 @classmethod
2897 2903 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
2898 2904 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
2899 2905 .join(
2900 2906 Permission,
2901 2907 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
2902 2908 .join(
2903 2909 UserGroupRepoToPerm,
2904 2910 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
2905 2911 .join(
2906 2912 UserGroup,
2907 2913 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
2908 2914 .join(
2909 2915 UserGroupMember,
2910 2916 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
2911 2917 .filter(
2912 2918 UserGroupMember.user_id == user_id,
2913 2919 UserGroup.users_group_active == true())
2914 2920
2915 2921 if repo_id:
2916 2922 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
2917 2923 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
2918 2924
2919 2925 @classmethod
2920 2926 def get_default_group_perms(cls, user_id, repo_group_id=None):
2921 2927 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2922 2928 .join(
2923 2929 Permission,
2924 2930 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
2925 2931 .join(
2926 2932 RepoGroup,
2927 2933 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
2928 2934 .filter(UserRepoGroupToPerm.user_id == user_id)
2929 2935 if repo_group_id:
2930 2936 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2931 2937 return q.all()
2932 2938
2933 2939 @classmethod
2934 2940 def get_default_group_perms_from_user_group(
2935 2941 cls, user_id, repo_group_id=None):
2936 2942 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2937 2943 .join(
2938 2944 Permission,
2939 2945 UserGroupRepoGroupToPerm.permission_id ==
2940 2946 Permission.permission_id)\
2941 2947 .join(
2942 2948 RepoGroup,
2943 2949 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2944 2950 .join(
2945 2951 UserGroup,
2946 2952 UserGroupRepoGroupToPerm.users_group_id ==
2947 2953 UserGroup.users_group_id)\
2948 2954 .join(
2949 2955 UserGroupMember,
2950 2956 UserGroupRepoGroupToPerm.users_group_id ==
2951 2957 UserGroupMember.users_group_id)\
2952 2958 .filter(
2953 2959 UserGroupMember.user_id == user_id,
2954 2960 UserGroup.users_group_active == true())
2955 2961 if repo_group_id:
2956 2962 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2957 2963 return q.all()
2958 2964
2959 2965 @classmethod
2960 2966 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2961 2967 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2962 2968 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2963 2969 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2964 2970 .filter(UserUserGroupToPerm.user_id == user_id)
2965 2971 if user_group_id:
2966 2972 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2967 2973 return q.all()
2968 2974
2969 2975 @classmethod
2970 2976 def get_default_user_group_perms_from_user_group(
2971 2977 cls, user_id, user_group_id=None):
2972 2978 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2973 2979 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2974 2980 .join(
2975 2981 Permission,
2976 2982 UserGroupUserGroupToPerm.permission_id ==
2977 2983 Permission.permission_id)\
2978 2984 .join(
2979 2985 TargetUserGroup,
2980 2986 UserGroupUserGroupToPerm.target_user_group_id ==
2981 2987 TargetUserGroup.users_group_id)\
2982 2988 .join(
2983 2989 UserGroup,
2984 2990 UserGroupUserGroupToPerm.user_group_id ==
2985 2991 UserGroup.users_group_id)\
2986 2992 .join(
2987 2993 UserGroupMember,
2988 2994 UserGroupUserGroupToPerm.user_group_id ==
2989 2995 UserGroupMember.users_group_id)\
2990 2996 .filter(
2991 2997 UserGroupMember.user_id == user_id,
2992 2998 UserGroup.users_group_active == true())
2993 2999 if user_group_id:
2994 3000 q = q.filter(
2995 3001 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2996 3002
2997 3003 return q.all()
2998 3004
2999 3005
3000 3006 class UserRepoToPerm(Base, BaseModel):
3001 3007 __tablename__ = 'repo_to_perm'
3002 3008 __table_args__ = (
3003 3009 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3004 3010 base_table_args
3005 3011 )
3006 3012
3007 3013 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3008 3014 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3009 3015 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3010 3016 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3011 3017
3012 3018 user = relationship('User')
3013 3019 repository = relationship('Repository')
3014 3020 permission = relationship('Permission')
3015 3021
3016 3022 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')
3017 3023
3018 3024 @classmethod
3019 3025 def create(cls, user, repository, permission):
3020 3026 n = cls()
3021 3027 n.user = user
3022 3028 n.repository = repository
3023 3029 n.permission = permission
3024 3030 Session().add(n)
3025 3031 return n
3026 3032
3027 3033 def __unicode__(self):
3028 3034 return u'<%s => %s >' % (self.user, self.repository)
3029 3035
3030 3036
3031 3037 class UserUserGroupToPerm(Base, BaseModel):
3032 3038 __tablename__ = 'user_user_group_to_perm'
3033 3039 __table_args__ = (
3034 3040 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3035 3041 base_table_args
3036 3042 )
3037 3043
3038 3044 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3039 3045 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3040 3046 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3041 3047 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3042 3048
3043 3049 user = relationship('User')
3044 3050 user_group = relationship('UserGroup')
3045 3051 permission = relationship('Permission')
3046 3052
3047 3053 @classmethod
3048 3054 def create(cls, user, user_group, permission):
3049 3055 n = cls()
3050 3056 n.user = user
3051 3057 n.user_group = user_group
3052 3058 n.permission = permission
3053 3059 Session().add(n)
3054 3060 return n
3055 3061
3056 3062 def __unicode__(self):
3057 3063 return u'<%s => %s >' % (self.user, self.user_group)
3058 3064
3059 3065
3060 3066 class UserToPerm(Base, BaseModel):
3061 3067 __tablename__ = 'user_to_perm'
3062 3068 __table_args__ = (
3063 3069 UniqueConstraint('user_id', 'permission_id'),
3064 3070 base_table_args
3065 3071 )
3066 3072
3067 3073 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3068 3074 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3069 3075 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3070 3076
3071 3077 user = relationship('User')
3072 3078 permission = relationship('Permission', lazy='joined')
3073 3079
3074 3080 def __unicode__(self):
3075 3081 return u'<%s => %s >' % (self.user, self.permission)
3076 3082
3077 3083
3078 3084 class UserGroupRepoToPerm(Base, BaseModel):
3079 3085 __tablename__ = 'users_group_repo_to_perm'
3080 3086 __table_args__ = (
3081 3087 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3082 3088 base_table_args
3083 3089 )
3084 3090
3085 3091 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3086 3092 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3087 3093 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3088 3094 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3089 3095
3090 3096 users_group = relationship('UserGroup')
3091 3097 permission = relationship('Permission')
3092 3098 repository = relationship('Repository')
3093 3099 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3094 3100
3095 3101 @classmethod
3096 3102 def create(cls, users_group, repository, permission):
3097 3103 n = cls()
3098 3104 n.users_group = users_group
3099 3105 n.repository = repository
3100 3106 n.permission = permission
3101 3107 Session().add(n)
3102 3108 return n
3103 3109
3104 3110 def __unicode__(self):
3105 3111 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3106 3112
3107 3113
3108 3114 class UserGroupUserGroupToPerm(Base, BaseModel):
3109 3115 __tablename__ = 'user_group_user_group_to_perm'
3110 3116 __table_args__ = (
3111 3117 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3112 3118 CheckConstraint('target_user_group_id != user_group_id'),
3113 3119 base_table_args
3114 3120 )
3115 3121
3116 3122 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3117 3123 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3118 3124 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3119 3125 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3120 3126
3121 3127 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3122 3128 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3123 3129 permission = relationship('Permission')
3124 3130
3125 3131 @classmethod
3126 3132 def create(cls, target_user_group, user_group, permission):
3127 3133 n = cls()
3128 3134 n.target_user_group = target_user_group
3129 3135 n.user_group = user_group
3130 3136 n.permission = permission
3131 3137 Session().add(n)
3132 3138 return n
3133 3139
3134 3140 def __unicode__(self):
3135 3141 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3136 3142
3137 3143
3138 3144 class UserGroupToPerm(Base, BaseModel):
3139 3145 __tablename__ = 'users_group_to_perm'
3140 3146 __table_args__ = (
3141 3147 UniqueConstraint('users_group_id', 'permission_id',),
3142 3148 base_table_args
3143 3149 )
3144 3150
3145 3151 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3146 3152 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3147 3153 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3148 3154
3149 3155 users_group = relationship('UserGroup')
3150 3156 permission = relationship('Permission')
3151 3157
3152 3158
3153 3159 class UserRepoGroupToPerm(Base, BaseModel):
3154 3160 __tablename__ = 'user_repo_group_to_perm'
3155 3161 __table_args__ = (
3156 3162 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3157 3163 base_table_args
3158 3164 )
3159 3165
3160 3166 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3161 3167 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3162 3168 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3163 3169 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3164 3170
3165 3171 user = relationship('User')
3166 3172 group = relationship('RepoGroup')
3167 3173 permission = relationship('Permission')
3168 3174
3169 3175 @classmethod
3170 3176 def create(cls, user, repository_group, permission):
3171 3177 n = cls()
3172 3178 n.user = user
3173 3179 n.group = repository_group
3174 3180 n.permission = permission
3175 3181 Session().add(n)
3176 3182 return n
3177 3183
3178 3184
3179 3185 class UserGroupRepoGroupToPerm(Base, BaseModel):
3180 3186 __tablename__ = 'users_group_repo_group_to_perm'
3181 3187 __table_args__ = (
3182 3188 UniqueConstraint('users_group_id', 'group_id'),
3183 3189 base_table_args
3184 3190 )
3185 3191
3186 3192 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3187 3193 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3188 3194 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3189 3195 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3190 3196
3191 3197 users_group = relationship('UserGroup')
3192 3198 permission = relationship('Permission')
3193 3199 group = relationship('RepoGroup')
3194 3200
3195 3201 @classmethod
3196 3202 def create(cls, user_group, repository_group, permission):
3197 3203 n = cls()
3198 3204 n.users_group = user_group
3199 3205 n.group = repository_group
3200 3206 n.permission = permission
3201 3207 Session().add(n)
3202 3208 return n
3203 3209
3204 3210 def __unicode__(self):
3205 3211 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3206 3212
3207 3213
3208 3214 class Statistics(Base, BaseModel):
3209 3215 __tablename__ = 'statistics'
3210 3216 __table_args__ = (
3211 3217 base_table_args
3212 3218 )
3213 3219
3214 3220 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3215 3221 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3216 3222 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3217 3223 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3218 3224 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3219 3225 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3220 3226
3221 3227 repository = relationship('Repository', single_parent=True)
3222 3228
3223 3229
3224 3230 class UserFollowing(Base, BaseModel):
3225 3231 __tablename__ = 'user_followings'
3226 3232 __table_args__ = (
3227 3233 UniqueConstraint('user_id', 'follows_repository_id'),
3228 3234 UniqueConstraint('user_id', 'follows_user_id'),
3229 3235 base_table_args
3230 3236 )
3231 3237
3232 3238 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3233 3239 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3234 3240 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3235 3241 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3236 3242 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3237 3243
3238 3244 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3239 3245
3240 3246 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3241 3247 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3242 3248
3243 3249 @classmethod
3244 3250 def get_repo_followers(cls, repo_id):
3245 3251 return cls.query().filter(cls.follows_repo_id == repo_id)
3246 3252
3247 3253
3248 3254 class CacheKey(Base, BaseModel):
3249 3255 __tablename__ = 'cache_invalidation'
3250 3256 __table_args__ = (
3251 3257 UniqueConstraint('cache_key'),
3252 3258 Index('key_idx', 'cache_key'),
3253 3259 base_table_args,
3254 3260 )
3255 3261
3256 3262 CACHE_TYPE_FEED = 'FEED'
3257 3263 CACHE_TYPE_README = 'README'
3258 3264 # namespaces used to register process/thread aware caches
3259 3265 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3260 3266 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3261 3267
3262 3268 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3263 3269 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3264 3270 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3265 3271 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3266 3272
3267 3273 def __init__(self, cache_key, cache_args=''):
3268 3274 self.cache_key = cache_key
3269 3275 self.cache_args = cache_args
3270 3276 self.cache_active = False
3271 3277
3272 3278 def __unicode__(self):
3273 3279 return u"<%s('%s:%s[%s]')>" % (
3274 3280 self.__class__.__name__,
3275 3281 self.cache_id, self.cache_key, self.cache_active)
3276 3282
3277 3283 def _cache_key_partition(self):
3278 3284 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3279 3285 return prefix, repo_name, suffix
3280 3286
3281 3287 def get_prefix(self):
3282 3288 """
3283 3289 Try to extract prefix from existing cache key. The key could consist
3284 3290 of prefix, repo_name, suffix
3285 3291 """
3286 3292 # this returns prefix, repo_name, suffix
3287 3293 return self._cache_key_partition()[0]
3288 3294
3289 3295 def get_suffix(self):
3290 3296 """
3291 3297 get suffix that might have been used in _get_cache_key to
3292 3298 generate self.cache_key. Only used for informational purposes
3293 3299 in repo_edit.mako.
3294 3300 """
3295 3301 # prefix, repo_name, suffix
3296 3302 return self._cache_key_partition()[2]
3297 3303
3298 3304 @classmethod
3299 3305 def delete_all_cache(cls):
3300 3306 """
3301 3307 Delete all cache keys from database.
3302 3308 Should only be run when all instances are down and all entries
3303 3309 thus stale.
3304 3310 """
3305 3311 cls.query().delete()
3306 3312 Session().commit()
3307 3313
3308 3314 @classmethod
3309 3315 def set_invalidate(cls, cache_uid, delete=False):
3310 3316 """
3311 3317 Mark all caches of a repo as invalid in the database.
3312 3318 """
3313 3319
3314 3320 try:
3315 3321 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3316 3322 if delete:
3317 3323 qry.delete()
3318 3324 log.debug('cache objects deleted for cache args %s',
3319 3325 safe_str(cache_uid))
3320 3326 else:
3321 3327 qry.update({"cache_active": False})
3322 3328 log.debug('cache objects marked as invalid for cache args %s',
3323 3329 safe_str(cache_uid))
3324 3330
3325 3331 Session().commit()
3326 3332 except Exception:
3327 3333 log.exception(
3328 3334 'Cache key invalidation failed for cache args %s',
3329 3335 safe_str(cache_uid))
3330 3336 Session().rollback()
3331 3337
3332 3338 @classmethod
3333 3339 def get_active_cache(cls, cache_key):
3334 3340 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3335 3341 if inv_obj:
3336 3342 return inv_obj
3337 3343 return None
3338 3344
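# Illustrative sketch (not part of the model): marking every cached scm
# instance of a repository as invalid, so the next cached lookup recomputes
# it. The namespace is built the same way as in the cached-instance code
# earlier in this file; `repo` is a hypothetical Repository instance.
invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
    repo_id=repo.repo_id)
CacheKey.set_invalidate(invalidation_namespace, delete=False)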
3339 3345
3340 3346 class ChangesetComment(Base, BaseModel):
3341 3347 __tablename__ = 'changeset_comments'
3342 3348 __table_args__ = (
3343 3349 Index('cc_revision_idx', 'revision'),
3344 3350 base_table_args,
3345 3351 )
3346 3352
3347 3353 COMMENT_OUTDATED = u'comment_outdated'
3348 3354 COMMENT_TYPE_NOTE = u'note'
3349 3355 COMMENT_TYPE_TODO = u'todo'
3350 3356 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3351 3357
3352 3358 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3353 3359 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3354 3360 revision = Column('revision', String(40), nullable=True)
3355 3361 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3356 3362 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3357 3363 line_no = Column('line_no', Unicode(10), nullable=True)
3358 3364 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3359 3365 f_path = Column('f_path', Unicode(1000), nullable=True)
3360 3366 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3361 3367 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3362 3368 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3363 3369 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3364 3370 renderer = Column('renderer', Unicode(64), nullable=True)
3365 3371 display_state = Column('display_state', Unicode(128), nullable=True)
3366 3372
3367 3373 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3368 3374 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3369 3375 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3370 3376 author = relationship('User', lazy='joined')
3371 3377 repo = relationship('Repository')
3372 3378 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3373 3379 pull_request = relationship('PullRequest', lazy='joined')
3374 3380 pull_request_version = relationship('PullRequestVersion')
3375 3381
3376 3382 @classmethod
3377 3383 def get_users(cls, revision=None, pull_request_id=None):
3378 3384 """
3379 3385 Returns users associated with this ChangesetComment, i.e. those
3380 3386 who actually commented
3381 3387
3382 3388 :param cls:
3383 3389 :param revision:
3384 3390 """
3385 3391 q = Session().query(User)\
3386 3392 .join(ChangesetComment.author)
3387 3393 if revision:
3388 3394 q = q.filter(cls.revision == revision)
3389 3395 elif pull_request_id:
3390 3396 q = q.filter(cls.pull_request_id == pull_request_id)
3391 3397 return q.all()
3392 3398
3393 3399 @classmethod
3394 3400 def get_index_from_version(cls, pr_version, versions):
3395 3401 num_versions = [x.pull_request_version_id for x in versions]
3396 3402 try:
3397 3403 return num_versions.index(pr_version) + 1
3398 3404 except (IndexError, ValueError):
3399 3405 return
3400 3406
3401 3407 @property
3402 3408 def outdated(self):
3403 3409 return self.display_state == self.COMMENT_OUTDATED
3404 3410
3405 3411 def outdated_at_version(self, version):
3406 3412 """
3407 3413 Checks if the comment is outdated for the given pull request version
3408 3414 """
3409 3415 return self.outdated and self.pull_request_version_id != version
3410 3416
3411 3417 def older_than_version(self, version):
3412 3418 """
3413 3419 Checks if the comment was made against an earlier version than the given one
3414 3420 """
3415 3421 if version is None:
3416 3422 return self.pull_request_version_id is not None
3417 3423
3418 3424 return self.pull_request_version_id < version
3419 3425
3420 3426 @property
3421 3427 def resolved(self):
3422 3428 return self.resolved_by[0] if self.resolved_by else None
3423 3429
3424 3430 @property
3425 3431 def is_todo(self):
3426 3432 return self.comment_type == self.COMMENT_TYPE_TODO
3427 3433
3428 3434 @property
3429 3435 def is_inline(self):
3430 3436 return self.line_no and self.f_path
3431 3437
3432 3438 def get_index_version(self, versions):
3433 3439 return self.get_index_from_version(
3434 3440 self.pull_request_version_id, versions)
3435 3441
3436 3442 def __repr__(self):
3437 3443 if self.comment_id:
3438 3444 return '<DB:Comment #%s>' % self.comment_id
3439 3445 else:
3440 3446 return '<DB:Comment at %#x>' % id(self)
3441 3447
3442 3448 def get_api_data(self):
3443 3449 comment = self
3444 3450 data = {
3445 3451 'comment_id': comment.comment_id,
3446 3452 'comment_type': comment.comment_type,
3447 3453 'comment_text': comment.text,
3448 3454 'comment_status': comment.status_change,
3449 3455 'comment_f_path': comment.f_path,
3450 3456 'comment_lineno': comment.line_no,
3451 3457 'comment_author': comment.author,
3452 3458 'comment_created_on': comment.created_on
3453 3459 }
3454 3460 return data
3455 3461
3456 3462 def __json__(self):
3457 3463 data = dict()
3458 3464 data.update(self.get_api_data())
3459 3465 return data
3460 3466
3461 3467
3462 3468 class ChangesetStatus(Base, BaseModel):
3463 3469 __tablename__ = 'changeset_statuses'
3464 3470 __table_args__ = (
3465 3471 Index('cs_revision_idx', 'revision'),
3466 3472 Index('cs_version_idx', 'version'),
3467 3473 UniqueConstraint('repo_id', 'revision', 'version'),
3468 3474 base_table_args
3469 3475 )
3470 3476
3471 3477 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3472 3478 STATUS_APPROVED = 'approved'
3473 3479 STATUS_REJECTED = 'rejected'
3474 3480 STATUS_UNDER_REVIEW = 'under_review'
3475 3481
3476 3482 STATUSES = [
3477 3483 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3478 3484 (STATUS_APPROVED, _("Approved")),
3479 3485 (STATUS_REJECTED, _("Rejected")),
3480 3486 (STATUS_UNDER_REVIEW, _("Under Review")),
3481 3487 ]
3482 3488
3483 3489 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3484 3490 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3485 3491 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3486 3492 revision = Column('revision', String(40), nullable=False)
3487 3493 status = Column('status', String(128), nullable=False, default=DEFAULT)
3488 3494 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3489 3495 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3490 3496 version = Column('version', Integer(), nullable=False, default=0)
3491 3497 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3492 3498
3493 3499 author = relationship('User', lazy='joined')
3494 3500 repo = relationship('Repository')
3495 3501 comment = relationship('ChangesetComment', lazy='joined')
3496 3502 pull_request = relationship('PullRequest', lazy='joined')
3497 3503
3498 3504 def __unicode__(self):
3499 3505 return u"<%s('%s[v%s]:%s')>" % (
3500 3506 self.__class__.__name__,
3501 3507 self.status, self.version, self.author
3502 3508 )
3503 3509
3504 3510 @classmethod
3505 3511 def get_status_lbl(cls, value):
3506 3512 return dict(cls.STATUSES).get(value)
3507 3513
3508 3514 @property
3509 3515 def status_lbl(self):
3510 3516 return ChangesetStatus.get_status_lbl(self.status)
3511 3517
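# Illustrative sketch: mapping a raw status value to its (translated) label.
ChangesetStatus.get_status_lbl('approved')       # -> 'Approved'
ChangesetStatus.get_status_lbl('no_such_state')  # -> None for unknown values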
3512 3518 def get_api_data(self):
3513 3519 status = self
3514 3520 data = {
3515 3521 'status_id': status.changeset_status_id,
3516 3522 'status': status.status,
3517 3523 }
3518 3524 return data
3519 3525
3520 3526 def __json__(self):
3521 3527 data = dict()
3522 3528 data.update(self.get_api_data())
3523 3529 return data
3524 3530
3525 3531
3526 3532 class _PullRequestBase(BaseModel):
3527 3533 """
3528 3534 Common attributes of pull request and version entries.
3529 3535 """
3530 3536
3531 3537 # .status values
3532 3538 STATUS_NEW = u'new'
3533 3539 STATUS_OPEN = u'open'
3534 3540 STATUS_CLOSED = u'closed'
3535 3541
3536 3542 title = Column('title', Unicode(255), nullable=True)
3537 3543 description = Column(
3538 3544 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3539 3545 nullable=True)
3540 3546 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3541 3547
3542 3548 # new/open/closed status of pull request (not approve/reject/etc)
3543 3549 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3544 3550 created_on = Column(
3545 3551 'created_on', DateTime(timezone=False), nullable=False,
3546 3552 default=datetime.datetime.now)
3547 3553 updated_on = Column(
3548 3554 'updated_on', DateTime(timezone=False), nullable=False,
3549 3555 default=datetime.datetime.now)
3550 3556
3551 3557 @declared_attr
3552 3558 def user_id(cls):
3553 3559 return Column(
3554 3560 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3555 3561 unique=None)
3556 3562
3557 3563 # 500 revisions max
3558 3564 _revisions = Column(
3559 3565 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3560 3566
3561 3567 @declared_attr
3562 3568 def source_repo_id(cls):
3563 3569 # TODO: dan: rename column to source_repo_id
3564 3570 return Column(
3565 3571 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3566 3572 nullable=False)
3567 3573
3568 3574 source_ref = Column('org_ref', Unicode(255), nullable=False)
3569 3575
3570 3576 @declared_attr
3571 3577 def target_repo_id(cls):
3572 3578 # TODO: dan: rename column to target_repo_id
3573 3579 return Column(
3574 3580 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3575 3581 nullable=False)
3576 3582
3577 3583 target_ref = Column('other_ref', Unicode(255), nullable=False)
3578 3584 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3579 3585
3580 3586 # TODO: dan: rename column to last_merge_source_rev
3581 3587 _last_merge_source_rev = Column(
3582 3588 'last_merge_org_rev', String(40), nullable=True)
3583 3589 # TODO: dan: rename column to last_merge_target_rev
3584 3590 _last_merge_target_rev = Column(
3585 3591 'last_merge_other_rev', String(40), nullable=True)
3586 3592 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3587 3593 merge_rev = Column('merge_rev', String(40), nullable=True)
3588 3594
3589 3595 reviewer_data = Column(
3590 3596 'reviewer_data_json', MutationObj.as_mutable(
3591 3597 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3592 3598
3593 3599 @property
3594 3600 def reviewer_data_json(self):
3595 3601 return json.dumps(self.reviewer_data)
3596 3602
3597 3603 @hybrid_property
3598 3604 def description_safe(self):
3599 3605 from rhodecode.lib import helpers as h
3600 3606 return h.escape(self.description)
3601 3607
3602 3608 @hybrid_property
3603 3609 def revisions(self):
3604 3610 return self._revisions.split(':') if self._revisions else []
3605 3611
3606 3612 @revisions.setter
3607 3613 def revisions(self, val):
3608 3614 self._revisions = ':'.join(val)
3609 3615
3610 3616 @hybrid_property
3611 3617 def last_merge_status(self):
3612 3618 return safe_int(self._last_merge_status)
3613 3619
3614 3620 @last_merge_status.setter
3615 3621 def last_merge_status(self, val):
3616 3622 self._last_merge_status = val
3617 3623
3618 3624 @declared_attr
3619 3625 def author(cls):
3620 3626 return relationship('User', lazy='joined')
3621 3627
3622 3628 @declared_attr
3623 3629 def source_repo(cls):
3624 3630 return relationship(
3625 3631 'Repository',
3626 3632 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3627 3633
3628 3634 @property
3629 3635 def source_ref_parts(self):
3630 3636 return self.unicode_to_reference(self.source_ref)
3631 3637
3632 3638 @declared_attr
3633 3639 def target_repo(cls):
3634 3640 return relationship(
3635 3641 'Repository',
3636 3642 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3637 3643
3638 3644 @property
3639 3645 def target_ref_parts(self):
3640 3646 return self.unicode_to_reference(self.target_ref)
3641 3647
3642 3648 @property
3643 3649 def shadow_merge_ref(self):
3644 3650 return self.unicode_to_reference(self._shadow_merge_ref)
3645 3651
3646 3652 @shadow_merge_ref.setter
3647 3653 def shadow_merge_ref(self, ref):
3648 3654 self._shadow_merge_ref = self.reference_to_unicode(ref)
3649 3655
3650 3656 def unicode_to_reference(self, raw):
3651 3657 """
3652 3658 Convert a unicode (or string) to a reference object.
3653 3659 If the value evaluates to False it returns None.
3654 3660 """
3655 3661 if raw:
3656 3662 refs = raw.split(':')
3657 3663 return Reference(*refs)
3658 3664 else:
3659 3665 return None
3660 3666
3661 3667 def reference_to_unicode(self, ref):
3662 3668 """
3663 3669 Convert a reference object to unicode.
3664 3670 If reference is None it returns None.
3665 3671 """
3666 3672 if ref:
3667 3673 return u':'.join(ref)
3668 3674 else:
3669 3675 return None
3670 3676
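# Illustrative sketch: refs are stored as colon-separated strings and
# round-tripped through the two helpers above. The field order shown
# (type:name:commit_id) is an assumption based on how source_ref_parts is
# consumed further down; `pr` is a hypothetical pull request object.
ref = pr.unicode_to_reference(u'branch:default:abcdef0123456789')
# ref.type == 'branch', ref.name == 'default', ref.commit_id == 'abcdef0123456789'
pr.reference_to_unicode(ref)     # -> u'branch:default:abcdef0123456789'
pr.unicode_to_reference(None)    # -> None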
3671 3677 def get_api_data(self, with_merge_state=True):
3672 3678 from rhodecode.model.pull_request import PullRequestModel
3673 3679
3674 3680 pull_request = self
3675 3681 if with_merge_state:
3676 3682 merge_status = PullRequestModel().merge_status(pull_request)
3677 3683 merge_state = {
3678 3684 'status': merge_status[0],
3679 3685 'message': safe_unicode(merge_status[1]),
3680 3686 }
3681 3687 else:
3682 3688 merge_state = {'status': 'not_available',
3683 3689 'message': 'not_available'}
3684 3690
3685 3691 merge_data = {
3686 3692 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3687 3693 'reference': (
3688 3694 pull_request.shadow_merge_ref._asdict()
3689 3695 if pull_request.shadow_merge_ref else None),
3690 3696 }
3691 3697
3692 3698 data = {
3693 3699 'pull_request_id': pull_request.pull_request_id,
3694 3700 'url': PullRequestModel().get_url(pull_request),
3695 3701 'title': pull_request.title,
3696 3702 'description': pull_request.description,
3697 3703 'status': pull_request.status,
3698 3704 'created_on': pull_request.created_on,
3699 3705 'updated_on': pull_request.updated_on,
3700 3706 'commit_ids': pull_request.revisions,
3701 3707 'review_status': pull_request.calculated_review_status(),
3702 3708 'mergeable': merge_state,
3703 3709 'source': {
3704 3710 'clone_url': pull_request.source_repo.clone_url(),
3705 3711 'repository': pull_request.source_repo.repo_name,
3706 3712 'reference': {
3707 3713 'name': pull_request.source_ref_parts.name,
3708 3714 'type': pull_request.source_ref_parts.type,
3709 3715 'commit_id': pull_request.source_ref_parts.commit_id,
3710 3716 },
3711 3717 },
3712 3718 'target': {
3713 3719 'clone_url': pull_request.target_repo.clone_url(),
3714 3720 'repository': pull_request.target_repo.repo_name,
3715 3721 'reference': {
3716 3722 'name': pull_request.target_ref_parts.name,
3717 3723 'type': pull_request.target_ref_parts.type,
3718 3724 'commit_id': pull_request.target_ref_parts.commit_id,
3719 3725 },
3720 3726 },
3721 3727 'merge': merge_data,
3722 3728 'author': pull_request.author.get_api_data(include_secrets=False,
3723 3729 details='basic'),
3724 3730 'reviewers': [
3725 3731 {
3726 3732 'user': reviewer.get_api_data(include_secrets=False,
3727 3733 details='basic'),
3728 3734 'reasons': reasons,
3729 3735 'review_status': st[0][1].status if st else 'not_reviewed',
3730 3736 }
3731 3737 for obj, reviewer, reasons, mandatory, st in
3732 3738 pull_request.reviewers_statuses()
3733 3739 ]
3734 3740 }
3735 3741
3736 3742 return data
3737 3743
3738 3744
3739 3745 class PullRequest(Base, _PullRequestBase):
3740 3746 __tablename__ = 'pull_requests'
3741 3747 __table_args__ = (
3742 3748 base_table_args,
3743 3749 )
3744 3750
3745 3751 pull_request_id = Column(
3746 3752 'pull_request_id', Integer(), nullable=False, primary_key=True)
3747 3753
3748 3754 def __repr__(self):
3749 3755 if self.pull_request_id:
3750 3756 return '<DB:PullRequest #%s>' % self.pull_request_id
3751 3757 else:
3752 3758 return '<DB:PullRequest at %#x>' % id(self)
3753 3759
3754 3760 reviewers = relationship('PullRequestReviewers',
3755 3761 cascade="all, delete, delete-orphan")
3756 3762 statuses = relationship('ChangesetStatus',
3757 3763 cascade="all, delete, delete-orphan")
3758 3764 comments = relationship('ChangesetComment',
3759 3765 cascade="all, delete, delete-orphan")
3760 3766 versions = relationship('PullRequestVersion',
3761 3767 cascade="all, delete, delete-orphan",
3762 3768 lazy='dynamic')
3763 3769
3764 3770 @classmethod
3765 3771 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3766 3772 internal_methods=None):
3767 3773
3768 3774 class PullRequestDisplay(object):
3769 3775 """
3770 3776 Special object wrapper for showing PullRequest data via Versions.
3771 3777 It mimics the PR object as closely as possible. This is a read-only
3772 3778 object, used for display only.
3773 3779 """
3774 3780
3775 3781 def __init__(self, attrs, internal=None):
3776 3782 self.attrs = attrs
3777 3783 # internal attributes have priority over the ones given via attrs
3778 3784 self.internal = internal or ['versions']
3779 3785
3780 3786 def __getattr__(self, item):
3781 3787 if item in self.internal:
3782 3788 return getattr(self, item)
3783 3789 try:
3784 3790 return self.attrs[item]
3785 3791 except KeyError:
3786 3792 raise AttributeError(
3787 3793 '%s object has no attribute %s' % (self, item))
3788 3794
3789 3795 def __repr__(self):
3790 3796 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3791 3797
3792 3798 def versions(self):
3793 3799 return pull_request_obj.versions.order_by(
3794 3800 PullRequestVersion.pull_request_version_id).all()
3795 3801
3796 3802 def is_closed(self):
3797 3803 return pull_request_obj.is_closed()
3798 3804
3799 3805 @property
3800 3806 def pull_request_version_id(self):
3801 3807 return getattr(pull_request_obj, 'pull_request_version_id', None)
3802 3808
3803 3809 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3804 3810
3805 3811 attrs.author = StrictAttributeDict(
3806 3812 pull_request_obj.author.get_api_data())
3807 3813 if pull_request_obj.target_repo:
3808 3814 attrs.target_repo = StrictAttributeDict(
3809 3815 pull_request_obj.target_repo.get_api_data())
3810 3816 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3811 3817
3812 3818 if pull_request_obj.source_repo:
3813 3819 attrs.source_repo = StrictAttributeDict(
3814 3820 pull_request_obj.source_repo.get_api_data())
3815 3821 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3816 3822
3817 3823 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3818 3824 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3819 3825 attrs.revisions = pull_request_obj.revisions
3820 3826
3821 3827 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3822 3828 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3823 3829 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3824 3830
3825 3831 return PullRequestDisplay(attrs, internal=internal_methods)
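# Illustrative usage sketch (not part of this changeset; `pr_version` and `pr`
# are hypothetical objects): given a PullRequestVersion and its original
# PullRequest, a read-only display wrapper can be built like:
#   display = PullRequest.get_pr_display_object(pr_version, pr)
#   display.author      # resolved from the version's get_api_data() payload
#   display.versions()  # always read from the original pull request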
3826 3832
3827 3833 def is_closed(self):
3828 3834 return self.status == self.STATUS_CLOSED
3829 3835
3830 3836 def __json__(self):
3831 3837 return {
3832 3838 'revisions': self.revisions,
3833 3839 }
3834 3840
3835 3841 def calculated_review_status(self):
3836 3842 from rhodecode.model.changeset_status import ChangesetStatusModel
3837 3843 return ChangesetStatusModel().calculated_review_status(self)
3838 3844
3839 3845 def reviewers_statuses(self):
3840 3846 from rhodecode.model.changeset_status import ChangesetStatusModel
3841 3847 return ChangesetStatusModel().reviewers_statuses(self)
3842 3848
3843 3849 @property
3844 3850 def workspace_id(self):
3845 3851 from rhodecode.model.pull_request import PullRequestModel
3846 3852 return PullRequestModel()._workspace_id(self)
3847 3853
3848 3854 def get_shadow_repo(self):
3849 3855 workspace_id = self.workspace_id
3850 3856 vcs_obj = self.target_repo.scm_instance()
3851 3857 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3852 3858 self.target_repo.repo_id, workspace_id)
3853 3859 if os.path.isdir(shadow_repository_path):
3854 3860 return vcs_obj._get_shadow_instance(shadow_repository_path)
3855 3861
3856 3862
3857 3863 class PullRequestVersion(Base, _PullRequestBase):
3858 3864 __tablename__ = 'pull_request_versions'
3859 3865 __table_args__ = (
3860 3866 base_table_args,
3861 3867 )
3862 3868
3863 3869 pull_request_version_id = Column(
3864 3870 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3865 3871 pull_request_id = Column(
3866 3872 'pull_request_id', Integer(),
3867 3873 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3868 3874 pull_request = relationship('PullRequest')
3869 3875
3870 3876 def __repr__(self):
3871 3877 if self.pull_request_version_id:
3872 3878 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3873 3879 else:
3874 3880 return '<DB:PullRequestVersion at %#x>' % id(self)
3875 3881
3876 3882 @property
3877 3883 def reviewers(self):
3878 3884 return self.pull_request.reviewers
3879 3885
3880 3886 @property
3881 3887 def versions(self):
3882 3888 return self.pull_request.versions
3883 3889
3884 3890 def is_closed(self):
3885 3891 # calculate from original
3886 3892 return self.pull_request.status == self.STATUS_CLOSED
3887 3893
3888 3894 def calculated_review_status(self):
3889 3895 return self.pull_request.calculated_review_status()
3890 3896
3891 3897 def reviewers_statuses(self):
3892 3898 return self.pull_request.reviewers_statuses()
3893 3899
3894 3900
3895 3901 class PullRequestReviewers(Base, BaseModel):
3896 3902 __tablename__ = 'pull_request_reviewers'
3897 3903 __table_args__ = (
3898 3904 base_table_args,
3899 3905 )
3900 3906
3901 3907 @hybrid_property
3902 3908 def reasons(self):
3903 3909 if not self._reasons:
3904 3910 return []
3905 3911 return self._reasons
3906 3912
3907 3913 @reasons.setter
3908 3914 def reasons(self, val):
3909 3915 val = val or []
3910 3916 if any(not isinstance(x, basestring) for x in val):
3911 3917 raise Exception('invalid reasons type, must be list of strings')
3912 3918 self._reasons = val
3913 3919
3914 3920 pull_requests_reviewers_id = Column(
3915 3921 'pull_requests_reviewers_id', Integer(), nullable=False,
3916 3922 primary_key=True)
3917 3923 pull_request_id = Column(
3918 3924 "pull_request_id", Integer(),
3919 3925 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3920 3926 user_id = Column(
3921 3927 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3922 3928 _reasons = Column(
3923 3929 'reason', MutationList.as_mutable(
3924 3930 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3925 3931
3926 3932 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3927 3933 user = relationship('User')
3928 3934 pull_request = relationship('PullRequest')
3929 3935
3930 3936 rule_data = Column(
3931 3937 'rule_data_json',
3932 3938 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3933 3939
3934 3940 def rule_user_group_data(self):
3935 3941 """
3936 3942 Returns the voting user group rule data for this reviewer
3937 3943 """
3938 3944
3939 3945 if self.rule_data and 'vote_rule' in self.rule_data:
3940 3946 user_group_data = {}
3941 3947 if 'rule_user_group_entry_id' in self.rule_data:
3942 3948 # means a group with voting rules!
3943 3949 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3944 3950 user_group_data['name'] = self.rule_data['rule_name']
3945 3951 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3946 3952
3947 3953 return user_group_data
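# Illustrative shape of the rule_data payload this method reads (keys taken
# from the checks above, values hypothetical):
#   {'vote_rule': -1, 'rule_name': 'default-reviewers',
#    'rule_user_group_entry_id': 2}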
3948 3954
3949 3955 def __unicode__(self):
3950 3956 return u"<%s('id:%s')>" % (self.__class__.__name__,
3951 3957 self.pull_requests_reviewers_id)
3952 3958
3953 3959
3954 3960 class Notification(Base, BaseModel):
3955 3961 __tablename__ = 'notifications'
3956 3962 __table_args__ = (
3957 3963 Index('notification_type_idx', 'type'),
3958 3964 base_table_args,
3959 3965 )
3960 3966
3961 3967 TYPE_CHANGESET_COMMENT = u'cs_comment'
3962 3968 TYPE_MESSAGE = u'message'
3963 3969 TYPE_MENTION = u'mention'
3964 3970 TYPE_REGISTRATION = u'registration'
3965 3971 TYPE_PULL_REQUEST = u'pull_request'
3966 3972 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3967 3973
3968 3974 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3969 3975 subject = Column('subject', Unicode(512), nullable=True)
3970 3976 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3971 3977 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3972 3978 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3973 3979 type_ = Column('type', Unicode(255))
3974 3980
3975 3981 created_by_user = relationship('User')
3976 3982 notifications_to_users = relationship('UserNotification', lazy='joined',
3977 3983 cascade="all, delete, delete-orphan")
3978 3984
3979 3985 @property
3980 3986 def recipients(self):
3981 3987 return [x.user for x in UserNotification.query()\
3982 3988 .filter(UserNotification.notification == self)\
3983 3989 .order_by(UserNotification.user_id.asc()).all()]
3984 3990
3985 3991 @classmethod
3986 3992 def create(cls, created_by, subject, body, recipients, type_=None):
3987 3993 if type_ is None:
3988 3994 type_ = Notification.TYPE_MESSAGE
3989 3995
3990 3996 notification = cls()
3991 3997 notification.created_by_user = created_by
3992 3998 notification.subject = subject
3993 3999 notification.body = body
3994 4000 notification.type_ = type_
3995 4001 notification.created_on = datetime.datetime.now()
3996 4002
3997 4003 # For each recipient, link the created notification to their account
3998 4004 for u in recipients:
3999 4005 assoc = UserNotification()
4000 4006 assoc.user_id = u.user_id
4001 4007 assoc.notification = notification
4002 4008
4003 4009 # if created_by is among the recipients, mark their notification
4004 4010 # as read
4005 4011 if u.user_id == created_by.user_id:
4006 4012 assoc.read = True
4007 4013 Session().add(assoc)
4008 4014
4009 4015 Session().add(notification)
4010 4016
4011 4017 return notification
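# Usage sketch (hypothetical user objects, not part of this changeset):
#   notification = Notification.create(
#       created_by=admin_user, subject=u'New comment', body=u'...',
#       recipients=[user_a, user_b],
#       type_=Notification.TYPE_CHANGESET_COMMENT)
#   Session().commit()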
4012 4018
4013 4019
4014 4020 class UserNotification(Base, BaseModel):
4015 4021 __tablename__ = 'user_to_notification'
4016 4022 __table_args__ = (
4017 4023 UniqueConstraint('user_id', 'notification_id'),
4018 4024 base_table_args
4019 4025 )
4020 4026
4021 4027 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4022 4028 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4023 4029 read = Column('read', Boolean, default=False)
4024 4030 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4025 4031
4026 4032 user = relationship('User', lazy="joined")
4027 4033 notification = relationship('Notification', lazy="joined",
4028 4034 order_by=lambda: Notification.created_on.desc(),)
4029 4035
4030 4036 def mark_as_read(self):
4031 4037 self.read = True
4032 4038 Session().add(self)
4033 4039
4034 4040
4035 4041 class Gist(Base, BaseModel):
4036 4042 __tablename__ = 'gists'
4037 4043 __table_args__ = (
4038 4044 Index('g_gist_access_id_idx', 'gist_access_id'),
4039 4045 Index('g_created_on_idx', 'created_on'),
4040 4046 base_table_args
4041 4047 )
4042 4048
4043 4049 GIST_PUBLIC = u'public'
4044 4050 GIST_PRIVATE = u'private'
4045 4051 DEFAULT_FILENAME = u'gistfile1.txt'
4046 4052
4047 4053 ACL_LEVEL_PUBLIC = u'acl_public'
4048 4054 ACL_LEVEL_PRIVATE = u'acl_private'
4049 4055
4050 4056 gist_id = Column('gist_id', Integer(), primary_key=True)
4051 4057 gist_access_id = Column('gist_access_id', Unicode(250))
4052 4058 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4053 4059 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4054 4060 gist_expires = Column('gist_expires', Float(53), nullable=False)
4055 4061 gist_type = Column('gist_type', Unicode(128), nullable=False)
4056 4062 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4057 4063 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4058 4064 acl_level = Column('acl_level', Unicode(128), nullable=True)
4059 4065
4060 4066 owner = relationship('User')
4061 4067
4062 4068 def __repr__(self):
4063 4069 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4064 4070
4065 4071 @hybrid_property
4066 4072 def description_safe(self):
4067 4073 from rhodecode.lib import helpers as h
4068 4074 return h.escape(self.gist_description)
4069 4075
4070 4076 @classmethod
4071 4077 def get_or_404(cls, id_):
4072 4078 from pyramid.httpexceptions import HTTPNotFound
4073 4079
4074 4080 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4075 4081 if not res:
4076 4082 raise HTTPNotFound()
4077 4083 return res
4078 4084
4079 4085 @classmethod
4080 4086 def get_by_access_id(cls, gist_access_id):
4081 4087 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4082 4088
4083 4089 def gist_url(self):
4084 4090 from rhodecode.model.gist import GistModel
4085 4091 return GistModel().get_url(self)
4086 4092
4087 4093 @classmethod
4088 4094 def base_path(cls):
4089 4095 """
4090 4096 Returns the base path where all gists are stored
4091 4097
4092 4098 :param cls:
4093 4099 """
4094 4100 from rhodecode.model.gist import GIST_STORE_LOC
4095 4101 q = Session().query(RhodeCodeUi)\
4096 4102 .filter(RhodeCodeUi.ui_key == URL_SEP)
4097 4103 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4098 4104 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4099 4105
4100 4106 def get_api_data(self):
4101 4107 """
4102 4108 Common function for generating gist related data for API
4103 4109 """
4104 4110 gist = self
4105 4111 data = {
4106 4112 'gist_id': gist.gist_id,
4107 4113 'type': gist.gist_type,
4108 4114 'access_id': gist.gist_access_id,
4109 4115 'description': gist.gist_description,
4110 4116 'url': gist.gist_url(),
4111 4117 'expires': gist.gist_expires,
4112 4118 'created_on': gist.created_on,
4113 4119 'modified_at': gist.modified_at,
4114 4120 'content': None,
4115 4121 'acl_level': gist.acl_level,
4116 4122 }
4117 4123 return data
4118 4124
4119 4125 def __json__(self):
4120 4126 data = dict(
4121 4127 )
4122 4128 data.update(self.get_api_data())
4123 4129 return data
4124 4130 # SCM functions
4125 4131
4126 4132 def scm_instance(self, **kwargs):
4127 4133 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4128 4134 return get_vcs_instance(
4129 4135 repo_path=safe_str(full_repo_path), create=False)
4130 4136
4131 4137
4132 4138 class ExternalIdentity(Base, BaseModel):
4133 4139 __tablename__ = 'external_identities'
4134 4140 __table_args__ = (
4135 4141 Index('local_user_id_idx', 'local_user_id'),
4136 4142 Index('external_id_idx', 'external_id'),
4137 4143 base_table_args
4138 4144 )
4139 4145
4140 4146 external_id = Column('external_id', Unicode(255), default=u'',
4141 4147 primary_key=True)
4142 4148 external_username = Column('external_username', Unicode(1024), default=u'')
4143 4149 local_user_id = Column('local_user_id', Integer(),
4144 4150 ForeignKey('users.user_id'), primary_key=True)
4145 4151 provider_name = Column('provider_name', Unicode(255), default=u'',
4146 4152 primary_key=True)
4147 4153 access_token = Column('access_token', String(1024), default=u'')
4148 4154 alt_token = Column('alt_token', String(1024), default=u'')
4149 4155 token_secret = Column('token_secret', String(1024), default=u'')
4150 4156
4151 4157 @classmethod
4152 4158 def by_external_id_and_provider(cls, external_id, provider_name,
4153 4159 local_user_id=None):
4154 4160 """
4155 4161 Returns ExternalIdentity instance based on search params
4156 4162
4157 4163 :param external_id:
4158 4164 :param provider_name:
4159 4165 :return: ExternalIdentity
4160 4166 """
4161 4167 query = cls.query()
4162 4168 query = query.filter(cls.external_id == external_id)
4163 4169 query = query.filter(cls.provider_name == provider_name)
4164 4170 if local_user_id:
4165 4171 query = query.filter(cls.local_user_id == local_user_id)
4166 4172 return query.first()
4167 4173
4168 4174 @classmethod
4169 4175 def user_by_external_id_and_provider(cls, external_id, provider_name):
4170 4176 """
4171 4177 Returns User instance based on search params
4172 4178
4173 4179 :param external_id:
4174 4180 :param provider_name:
4175 4181 :return: User
4176 4182 """
4177 4183 query = User.query()
4178 4184 query = query.filter(cls.external_id == external_id)
4179 4185 query = query.filter(cls.provider_name == provider_name)
4180 4186 query = query.filter(User.user_id == cls.local_user_id)
4181 4187 return query.first()
4182 4188
4183 4189 @classmethod
4184 4190 def by_local_user_id(cls, local_user_id):
4185 4191 """
4186 4192 Returns all tokens for user
4187 4193
4188 4194 :param local_user_id:
4189 4195 :return: ExternalIdentity
4190 4196 """
4191 4197 query = cls.query()
4192 4198 query = query.filter(cls.local_user_id == local_user_id)
4193 4199 return query
4194 4200
4195 4201
4196 4202 class Integration(Base, BaseModel):
4197 4203 __tablename__ = 'integrations'
4198 4204 __table_args__ = (
4199 4205 base_table_args
4200 4206 )
4201 4207
4202 4208 integration_id = Column('integration_id', Integer(), primary_key=True)
4203 4209 integration_type = Column('integration_type', String(255))
4204 4210 enabled = Column('enabled', Boolean(), nullable=False)
4205 4211 name = Column('name', String(255), nullable=False)
4206 4212 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4207 4213 default=False)
4208 4214
4209 4215 settings = Column(
4210 4216 'settings_json', MutationObj.as_mutable(
4211 4217 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4212 4218 repo_id = Column(
4213 4219 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4214 4220 nullable=True, unique=None, default=None)
4215 4221 repo = relationship('Repository', lazy='joined')
4216 4222
4217 4223 repo_group_id = Column(
4218 4224 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4219 4225 nullable=True, unique=None, default=None)
4220 4226 repo_group = relationship('RepoGroup', lazy='joined')
4221 4227
4222 4228 @property
4223 4229 def scope(self):
4224 4230 if self.repo:
4225 4231 return repr(self.repo)
4226 4232 if self.repo_group:
4227 4233 if self.child_repos_only:
4228 4234 return repr(self.repo_group) + ' (child repos only)'
4229 4235 else:
4230 4236 return repr(self.repo_group) + ' (recursive)'
4231 4237 if self.child_repos_only:
4232 4238 return 'root_repos'
4233 4239 return 'global'
4234 4240
4235 4241 def __repr__(self):
4236 4242 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4237 4243
4238 4244
4239 4245 class RepoReviewRuleUser(Base, BaseModel):
4240 4246 __tablename__ = 'repo_review_rules_users'
4241 4247 __table_args__ = (
4242 4248 base_table_args
4243 4249 )
4244 4250
4245 4251 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4246 4252 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4247 4253 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4248 4254 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4249 4255 user = relationship('User')
4250 4256
4251 4257 def rule_data(self):
4252 4258 return {
4253 4259 'mandatory': self.mandatory
4254 4260 }
4255 4261
4256 4262
4257 4263 class RepoReviewRuleUserGroup(Base, BaseModel):
4258 4264 __tablename__ = 'repo_review_rules_users_groups'
4259 4265 __table_args__ = (
4260 4266 base_table_args
4261 4267 )
4262 4268
4263 4269 VOTE_RULE_ALL = -1
4264 4270
4265 4271 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4266 4272 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4267 4273 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4268 4274 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4269 4275 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4270 4276 users_group = relationship('UserGroup')
4271 4277
4272 4278 def rule_data(self):
4273 4279 return {
4274 4280 'mandatory': self.mandatory,
4275 4281 'vote_rule': self.vote_rule
4276 4282 }
4277 4283
4278 4284 @property
4279 4285 def vote_rule_label(self):
4280 4286 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4281 4287 return 'all must vote'
4282 4288 else:
4283 4289 return 'min. vote {}'.format(self.vote_rule)
4284 4290
4285 4291
4286 4292 class RepoReviewRule(Base, BaseModel):
4287 4293 __tablename__ = 'repo_review_rules'
4288 4294 __table_args__ = (
4289 4295 base_table_args
4290 4296 )
4291 4297
4292 4298 repo_review_rule_id = Column(
4293 4299 'repo_review_rule_id', Integer(), primary_key=True)
4294 4300 repo_id = Column(
4295 4301 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4296 4302 repo = relationship('Repository', backref='review_rules')
4297 4303
4298 4304 review_rule_name = Column('review_rule_name', String(255))
4299 4305 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4300 4306 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4301 4307 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4302 4308
4303 4309 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4304 4310 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4305 4311 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4306 4312 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4307 4313
4308 4314 rule_users = relationship('RepoReviewRuleUser')
4309 4315 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4310 4316
4311 4317 def _validate_pattern(self, value):
4312 4318 re.compile('^' + glob2re(value) + '$')
4313 4319
4314 4320 @hybrid_property
4315 4321 def source_branch_pattern(self):
4316 4322 return self._branch_pattern or '*'
4317 4323
4318 4324 @source_branch_pattern.setter
4319 4325 def source_branch_pattern(self, value):
4320 4326 self._validate_pattern(value)
4321 4327 self._branch_pattern = value or '*'
4322 4328
4323 4329 @hybrid_property
4324 4330 def target_branch_pattern(self):
4325 4331 return self._target_branch_pattern or '*'
4326 4332
4327 4333 @target_branch_pattern.setter
4328 4334 def target_branch_pattern(self, value):
4329 4335 self._validate_pattern(value)
4330 4336 self._target_branch_pattern = value or '*'
4331 4337
4332 4338 @hybrid_property
4333 4339 def file_pattern(self):
4334 4340 return self._file_pattern or '*'
4335 4341
4336 4342 @file_pattern.setter
4337 4343 def file_pattern(self, value):
4338 4344 self._validate_pattern(value)
4339 4345 self._file_pattern = value or '*'
4340 4346
4341 4347 def matches(self, source_branch, target_branch, files_changed):
4342 4348 """
4343 4349 Check if this review rule matches a branch/files in a pull request
4344 4350
4345 4351 :param source_branch: source branch name for the commit
4346 4352 :param target_branch: target branch name for the commit
4347 4353 :param files_changed: list of file paths changed in the pull request
4348 4354 """
4349 4355
4350 4356 source_branch = source_branch or ''
4351 4357 target_branch = target_branch or ''
4352 4358 files_changed = files_changed or []
4353 4359
4354 4360 branch_matches = True
4355 4361 if source_branch or target_branch:
4356 4362 if self.source_branch_pattern == '*':
4357 4363 source_branch_match = True
4358 4364 else:
4359 4365 if self.source_branch_pattern.startswith('re:'):
4360 4366 source_pattern = self.source_branch_pattern[3:]
4361 4367 else:
4362 4368 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4363 4369 source_branch_regex = re.compile(source_pattern)
4364 4370 source_branch_match = bool(source_branch_regex.search(source_branch))
4365 4371 if self.target_branch_pattern == '*':
4366 4372 target_branch_match = True
4367 4373 else:
4368 4374 if self.target_branch_pattern.startswith('re:'):
4369 4375 target_pattern = self.target_branch_pattern[3:]
4370 4376 else:
4371 4377 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4372 4378 target_branch_regex = re.compile(target_pattern)
4373 4379 target_branch_match = bool(target_branch_regex.search(target_branch))
4374 4380
4375 4381 branch_matches = source_branch_match and target_branch_match
4376 4382
4377 4383 files_matches = True
4378 4384 if self.file_pattern != '*':
4379 4385 files_matches = False
4380 4386 if self.file_pattern.startswith('re:'):
4381 4387 file_pattern = self.file_pattern[3:]
4382 4388 else:
4383 4389 file_pattern = glob2re(self.file_pattern)
4384 4390 file_regex = re.compile(file_pattern)
4385 4391 for filename in files_changed:
4386 4392 if file_regex.search(filename):
4387 4393 files_matches = True
4388 4394 break
4389 4395
4390 4396 return branch_matches and files_matches
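# Pattern semantics used above (sketch, hypothetical values): plain patterns
# are treated as globs and translated via glob2re(), while patterns prefixed
# with 're:' are used as raw regular expressions, e.g.
#   rule.source_branch_pattern = 'feature/*'        # glob match
#   rule.source_branch_pattern = 're:^hotfix-\d+$'  # regex match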
4391 4397
4392 4398 @property
4393 4399 def review_users(self):
4394 4400 """ Returns the users which this rule applies to """
4395 4401
4396 4402 users = collections.OrderedDict()
4397 4403
4398 4404 for rule_user in self.rule_users:
4399 4405 if rule_user.user.active:
4400 4406 if rule_user.user not in users:
4401 4407 users[rule_user.user.username] = {
4402 4408 'user': rule_user.user,
4403 4409 'source': 'user',
4404 4410 'source_data': {},
4405 4411 'data': rule_user.rule_data()
4406 4412 }
4407 4413
4408 4414 for rule_user_group in self.rule_user_groups:
4409 4415 source_data = {
4410 4416 'user_group_id': rule_user_group.users_group.users_group_id,
4411 4417 'name': rule_user_group.users_group.users_group_name,
4412 4418 'members': len(rule_user_group.users_group.members)
4413 4419 }
4414 4420 for member in rule_user_group.users_group.members:
4415 4421 if member.user.active:
4416 4422 key = member.user.username
4417 4423 if key in users:
4418 4424 # skip this member as we already have them; this prevents
4419 4425 # overriding the "first" matched users with duplicates coming
4420 4426 # from multiple groups
4421 4427 continue
4422 4428
4423 4429 users[key] = {
4424 4430 'user': member.user,
4425 4431 'source': 'user_group',
4426 4432 'source_data': source_data,
4427 4433 'data': rule_user_group.rule_data()
4428 4434 }
4429 4435
4430 4436 return users
4431 4437
4432 4438 def user_group_vote_rule(self, user_id):
4433 4439
4434 4440 rules = []
4435 4441 if not self.rule_user_groups:
4436 4442 return rules
4437 4443
4438 4444 for user_group in self.rule_user_groups:
4439 4445 user_group_members = [x.user_id for x in user_group.users_group.members]
4440 4446 if user_id in user_group_members:
4441 4447 rules.append(user_group)
4442 4448 return rules
4443 4449
4444 4450 def __repr__(self):
4445 4451 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4446 4452 self.repo_review_rule_id, self.repo)
4447 4453
4448 4454
4449 4455 class ScheduleEntry(Base, BaseModel):
4450 4456 __tablename__ = 'schedule_entries'
4451 4457 __table_args__ = (
4452 4458 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4453 4459 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4454 4460 base_table_args,
4455 4461 )
4456 4462
4457 4463 schedule_types = ['crontab', 'timedelta', 'integer']
4458 4464 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4459 4465
4460 4466 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4461 4467 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4462 4468 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4463 4469
4464 4470 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4465 4471 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4466 4472
4467 4473 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4468 4474 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4469 4475
4470 4476 # task
4471 4477 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4472 4478 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4473 4479 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4474 4480 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4475 4481
4476 4482 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4477 4483 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4478 4484
4479 4485 @hybrid_property
4480 4486 def schedule_type(self):
4481 4487 return self._schedule_type
4482 4488
4483 4489 @schedule_type.setter
4484 4490 def schedule_type(self, val):
4485 4491 if val not in self.schedule_types:
4486 4492 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4487 4493 self.schedule_types, val))
4488 4494
4489 4495 self._schedule_type = val
4490 4496
4491 4497 @classmethod
4492 4498 def get_uid(cls, obj):
4493 4499 args = obj.task_args
4494 4500 kwargs = obj.task_kwargs
4495 4501 if isinstance(args, JsonRaw):
4496 4502 try:
4497 4503 args = json.loads(args)
4498 4504 except ValueError:
4499 4505 args = tuple()
4500 4506
4501 4507 if isinstance(kwargs, JsonRaw):
4502 4508 try:
4503 4509 kwargs = json.loads(kwargs)
4504 4510 except ValueError:
4505 4511 kwargs = dict()
4506 4512
4507 4513 dot_notation = obj.task_dot_notation
4508 4514 val = '.'.join(map(safe_str, [
4509 4515 sorted(dot_notation), args, sorted(kwargs.items())]))
4510 4516 return hashlib.sha1(val).hexdigest()
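# Note: the uid is a sha1 over a string built from the task dot-notation, its
# args and its sorted kwargs, so two entries describing the same task
# signature share a task_uid (also enforced by the s_task_uid_idx unique
# constraint above).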
4511 4517
4512 4518 @classmethod
4513 4519 def get_by_schedule_name(cls, schedule_name):
4514 4520 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4515 4521
4516 4522 @classmethod
4517 4523 def get_by_schedule_id(cls, schedule_id):
4518 4524 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4519 4525
4520 4526 @property
4521 4527 def task(self):
4522 4528 return self.task_dot_notation
4523 4529
4524 4530 @property
4525 4531 def schedule(self):
4526 4532 from rhodecode.lib.celerylib.utils import raw_2_schedule
4527 4533 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4528 4534 return schedule
4529 4535
4530 4536 @property
4531 4537 def args(self):
4532 4538 try:
4533 4539 return list(self.task_args or [])
4534 4540 except ValueError:
4535 4541 return list()
4536 4542
4537 4543 @property
4538 4544 def kwargs(self):
4539 4545 try:
4540 4546 return dict(self.task_kwargs or {})
4541 4547 except ValueError:
4542 4548 return dict()
4543 4549
4544 4550 def _as_raw(self, val):
4545 4551 if hasattr(val, 'de_coerce'):
4546 4552 val = val.de_coerce()
4547 4553 if val:
4548 4554 val = json.dumps(val)
4549 4555
4550 4556 return val
4551 4557
4552 4558 @property
4553 4559 def schedule_definition_raw(self):
4554 4560 return self._as_raw(self.schedule_definition)
4555 4561
4556 4562 @property
4557 4563 def args_raw(self):
4558 4564 return self._as_raw(self.task_args)
4559 4565
4560 4566 @property
4561 4567 def kwargs_raw(self):
4562 4568 return self._as_raw(self.task_kwargs)
4563 4569
4564 4570 def __repr__(self):
4565 4571 return '<DB:ScheduleEntry({}:{})>'.format(
4566 4572 self.schedule_entry_id, self.schedule_name)
4567 4573
4568 4574
4569 4575 @event.listens_for(ScheduleEntry, 'before_update')
4570 4576 def update_task_uid(mapper, connection, target):
4571 4577 target.task_uid = ScheduleEntry.get_uid(target)
4572 4578
4573 4579
4574 4580 @event.listens_for(ScheduleEntry, 'before_insert')
4575 4581 def set_task_uid(mapper, connection, target):
4576 4582 target.task_uid = ScheduleEntry.get_uid(target)
4577 4583
4578 4584
4579 4585 class _BaseBranchPerms(BaseModel):
4580 4586 @classmethod
4581 4587 def compute_hash(cls, value):
4582 4588 return sha1_safe(value)
4583 4589
4584 4590 @hybrid_property
4585 4591 def branch_pattern(self):
4586 4592 return self._branch_pattern or '*'
4587 4593
4588 4594 @hybrid_property
4589 4595 def branch_hash(self):
4590 4596 return self._branch_hash
4591 4597
4592 4598 def _validate_glob(self, value):
4593 4599 re.compile('^' + glob2re(value) + '$')
4594 4600
4595 4601 @branch_pattern.setter
4596 4602 def branch_pattern(self, value):
4597 4603 self._validate_glob(value)
4598 4604 self._branch_pattern = value or '*'
4599 4605 # set the Hash when setting the branch pattern
4600 4606 self._branch_hash = self.compute_hash(self._branch_pattern)
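# illustrative: setting branch_pattern = 'release/*' stores the glob in
# _branch_pattern and its sha1_safe() digest in _branch_hash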
4601 4607
4602 4608 def matches(self, branch):
4603 4609 """
4604 4610 Check if the given branch matches this entry
4605 4611
4606 4612 :param branch: branch name for the commit
4607 4613 """
4608 4614
4609 4615 branch = branch or ''
4610 4616
4611 4617 branch_matches = True
4612 4618 if branch:
4613 4619 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4614 4620 branch_matches = bool(branch_regex.search(branch))
4615 4621
4616 4622 return branch_matches
4617 4623
4618 4624
4619 4625 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
4620 4626 __tablename__ = 'user_to_repo_branch_permissions'
4621 4627 __table_args__ = (
4622 4628 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4623 4629 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4624 4630 )
4625 4631
4626 4632 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4627 4633
4628 4634 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4629 4635 repo = relationship('Repository', backref='user_branch_perms')
4630 4636
4631 4637 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4632 4638 permission = relationship('Permission')
4633 4639
4634 4640 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
4635 4641 user_repo_to_perm = relationship('UserRepoToPerm')
4636 4642
4637 4643 rule_order = Column('rule_order', Integer(), nullable=False)
4638 4644 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4639 4645 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4640 4646
4641 4647 def __unicode__(self):
4642 4648 return u'<UserBranchPermission(%s => %r)>' % (
4643 4649 self.user_repo_to_perm, self.branch_pattern)
4644 4650
4645 4651
4646 4652 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
4647 4653 __tablename__ = 'user_group_to_repo_branch_permissions'
4648 4654 __table_args__ = (
4649 4655 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4650 4656 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4651 4657 )
4652 4658
4653 4659 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4654 4660
4655 4661 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4656 4662 repo = relationship('Repository', backref='user_group_branch_perms')
4657 4663
4658 4664 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4659 4665 permission = relationship('Permission')
4660 4666
4661 4667 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
4662 4668 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
4663 4669
4664 4670 rule_order = Column('rule_order', Integer(), nullable=False)
4665 4671 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4666 4672 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4667 4673
4668 4674 def __unicode__(self):
4669 4675 return u'<UserGroupBranchPermission(%s => %r)>' % (
4670 4676 self.user_group_repo_to_perm, self.branch_pattern)
4671 4677
4672 4678
4673 4679 class DbMigrateVersion(Base, BaseModel):
4674 4680 __tablename__ = 'db_migrate_version'
4675 4681 __table_args__ = (
4676 4682 base_table_args,
4677 4683 )
4678 4684
4679 4685 repository_id = Column('repository_id', String(250), primary_key=True)
4680 4686 repository_path = Column('repository_path', Text)
4681 4687 version = Column('version', Integer)
4682 4688
4683 4689 @classmethod
4684 4690 def set_version(cls, version):
4685 4691 """
4686 4692 Helper for forcing a different version, usually for debugging purposes via ishell.
4687 4693 """
4688 4694 ver = DbMigrateVersion.query().first()
4689 4695 ver.version = version
4690 4696 Session().commit()
4691 4697
4692 4698
4693 4699 class DbSession(Base, BaseModel):
4694 4700 __tablename__ = 'db_session'
4695 4701 __table_args__ = (
4696 4702 base_table_args,
4697 4703 )
4698 4704
4699 4705 def __repr__(self):
4700 4706 return '<DB:DbSession({})>'.format(self.id)
4701 4707
4702 4708 id = Column('id', Integer())
4703 4709 namespace = Column('namespace', String(255), primary_key=True)
4704 4710 accessed = Column('accessed', DateTime, nullable=False)
4705 4711 created = Column('created', DateTime, nullable=False)
4706 4712 data = Column('data', PickleType, nullable=False)
@@ -1,1053 +1,1072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 42 action_logger_generic)
43 43 from rhodecode.lib.vcs.backends import get_backend
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user_id = def_user.user_id
84 84
85 85 return repo_to_perm
86 86
87 87 @LazyProperty
88 88 def repos_path(self):
89 89 """
90 90 Gets the repositories root path from database
91 91 """
92 92 settings_model = VcsSettingsModel(sa=self.sa)
93 93 return settings_model.get_repos_location()
94 94
95 95 def get(self, repo_id):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_id == repo_id)
98 98
99 99 return repo.scalar()
100 100
101 101 def get_repo(self, repository):
102 102 return self._get_repo(repository)
103 103
104 104 def get_by_repo_name(self, repo_name, cache=False):
105 105 repo = self.sa.query(Repository) \
106 106 .filter(Repository.repo_name == repo_name)
107 107
108 108 if cache:
109 109 name_key = _hash_key(repo_name)
110 110 repo = repo.options(
111 111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
112 112 return repo.scalar()
113 113
114 114 def _extract_id_from_repo_name(self, repo_name):
115 115 if repo_name.startswith('/'):
116 116 repo_name = repo_name.lstrip('/')
117 117 by_id_match = re.match(r'^_(\d{1,})', repo_name)
118 118 if by_id_match:
119 119 return by_id_match.groups()[0]
120 120
121 121 def get_repo_by_id(self, repo_name):
122 122 """
123 123 Extracts repo_name by id from special urls.
124 124 Example url is _11/repo_name
125 125
126 126 :param repo_name:
127 127 :return: repo object if matched else None
128 128 """
129 129
130 130 try:
131 131 _repo_id = self._extract_id_from_repo_name(repo_name)
132 132 if _repo_id:
133 133 return self.get(_repo_id)
134 134 except Exception:
135 135 log.exception('Failed to extract repo_name from URL')
136 136
137 137 return None
138 138
139 139 def get_repos_for_root(self, root, traverse=False):
140 140 if traverse:
141 141 like_expression = u'{}%'.format(safe_unicode(root))
142 142 repos = Repository.query().filter(
143 143 Repository.repo_name.like(like_expression)).all()
144 144 else:
145 145 if root and not isinstance(root, RepoGroup):
146 146 raise ValueError(
147 147 'Root must be an instance '
148 148 'of RepoGroup, got:{} instead'.format(type(root)))
149 149 repos = Repository.query().filter(Repository.group == root).all()
150 150 return repos
151 151
152 152 def get_url(self, repo, request=None, permalink=False):
153 153 if not request:
154 154 request = get_current_request()
155 155
156 156 if not request:
157 157 return
158 158
159 159 if permalink:
160 160 return request.route_url(
161 161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 162 else:
163 163 return request.route_url(
164 164 'repo_summary', repo_name=safe_str(repo.repo_name))
165 165
166 166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 167 if not request:
168 168 request = get_current_request()
169 169
170 170 if not request:
171 171 return
172 172
173 173 if permalink:
174 174 return request.route_url(
175 175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 176 commit_id=commit_id)
177 177
178 178 else:
179 179 return request.route_url(
180 180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 181 commit_id=commit_id)
182 182
183 183 def get_repo_log(self, repo, filter_term):
184 184 repo_log = UserLog.query()\
185 185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 186 UserLog.repository_name == repo.repo_name))\
187 187 .options(joinedload(UserLog.user))\
188 188 .options(joinedload(UserLog.repository))\
189 189 .order_by(UserLog.action_date.desc())
190 190
191 191 repo_log = user_log_filter(repo_log, filter_term)
192 192 return repo_log
193 193
194 194 @classmethod
195 195 def update_repoinfo(cls, repositories=None):
196 196 if not repositories:
197 197 repositories = Repository.getAll()
198 198 for repo in repositories:
199 199 repo.update_commit_cache()
200 200
201 201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 202 super_user_actions=False):
203 203 _render = get_current_request().get_partial_renderer(
204 204 'rhodecode:templates/data_table/_dt_elements.mako')
205 205 c = _render.get_call_context()
206 206
207 207 def quick_menu(repo_name):
208 208 return _render('quick_menu', repo_name)
209 209
210 def repo_lnk(name, rtype, rstate, private, fork_of):
211 return _render('repo_name', name, rtype, rstate, private, fork_of,
210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
212 212 short_name=not admin, admin=False)
213 213
214 214 def last_change(last_change):
215 215 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
216 216 last_change = last_change + datetime.timedelta(seconds=
217 217 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
218 218 return _render("last_change", last_change)
219 219
220 220 def rss_lnk(repo_name):
221 221 return _render("rss", repo_name)
222 222
223 223 def atom_lnk(repo_name):
224 224 return _render("atom", repo_name)
225 225
226 226 def last_rev(repo_name, cs_cache):
227 227 return _render('revision', repo_name, cs_cache.get('revision'),
228 228 cs_cache.get('raw_id'), cs_cache.get('author'),
229 229 cs_cache.get('message'), cs_cache.get('date'))
230 230
231 231 def desc(desc):
232 232 return _render('repo_desc', desc, c.visual.stylify_metatags)
233 233
234 234 def state(repo_state):
235 235 return _render("repo_state", repo_state)
236 236
237 237 def repo_actions(repo_name):
238 238 return _render('repo_actions', repo_name, super_user_actions)
239 239
240 240 def user_profile(username):
241 241 return _render('user_profile', username)
242 242
243 243 repos_data = []
244 244 for repo in repo_list:
245 245 cs_cache = repo.changeset_cache
246 246 row = {
247 247 "menu": quick_menu(repo.repo_name),
248 248
249 "name": repo_lnk(repo.repo_name, repo.repo_type,
250 repo.repo_state, repo.private, repo.fork),
249 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
250 repo.private, repo.archived, repo.fork),
251 251 "name_raw": repo.repo_name.lower(),
252 252
253 253 "last_change": last_change(repo.last_db_change),
254 254 "last_change_raw": datetime_to_time(repo.last_db_change),
255 255
256 256 "last_changeset": last_rev(repo.repo_name, cs_cache),
257 257 "last_changeset_raw": cs_cache.get('revision'),
258 258
259 259 "desc": desc(repo.description_safe),
260 260 "owner": user_profile(repo.user.username),
261 261
262 262 "state": state(repo.repo_state),
263 263 "rss": rss_lnk(repo.repo_name),
264 264
265 265 "atom": atom_lnk(repo.repo_name),
266 266 }
267 267 if admin:
268 268 row.update({
269 269 "action": repo_actions(repo.repo_name),
270 270 })
271 271 repos_data.append(row)
272 272
273 273 return repos_data
274 274
275 275 def _get_defaults(self, repo_name):
276 276 """
277 277 Gets information about repository, and returns a dict for
278 278 usage in forms
279 279
280 280 :param repo_name:
281 281 """
282 282
283 283 repo_info = Repository.get_by_repo_name(repo_name)
284 284
285 285 if repo_info is None:
286 286 return None
287 287
288 288 defaults = repo_info.get_dict()
289 289 defaults['repo_name'] = repo_info.just_name
290 290
291 291 groups = repo_info.groups_with_parents
292 292 parent_group = groups[-1] if groups else None
293 293
294 294 # we use -1 as this is how in HTML, we mark an empty group
295 295 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
296 296
297 297 keys_to_process = (
298 298 {'k': 'repo_type', 'strip': False},
299 299 {'k': 'repo_enable_downloads', 'strip': True},
300 300 {'k': 'repo_description', 'strip': True},
301 301 {'k': 'repo_enable_locking', 'strip': True},
302 302 {'k': 'repo_landing_rev', 'strip': True},
303 303 {'k': 'clone_uri', 'strip': False},
304 304 {'k': 'push_uri', 'strip': False},
305 305 {'k': 'repo_private', 'strip': True},
306 306 {'k': 'repo_enable_statistics', 'strip': True}
307 307 )
308 308
309 309 for item in keys_to_process:
310 310 attr = item['k']
311 311 if item['strip']:
312 312 attr = remove_prefix(item['k'], 'repo_')
313 313
314 314 val = defaults[attr]
315 315 if item['k'] == 'repo_landing_rev':
316 316 val = ':'.join(defaults[attr])
317 317 defaults[item['k']] = val
318 318 if item['k'] == 'clone_uri':
319 319 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
320 320 if item['k'] == 'push_uri':
321 321 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
322 322
323 323 # fill owner
324 324 if repo_info.user:
325 325 defaults.update({'user': repo_info.user.username})
326 326 else:
327 327 replacement_user = User.get_first_super_admin().username
328 328 defaults.update({'user': replacement_user})
329 329
330 330 return defaults
331 331
332 332 def update(self, repo, **kwargs):
333 333 try:
334 334 cur_repo = self._get_repo(repo)
335 335 source_repo_name = cur_repo.repo_name
336 336 if 'user' in kwargs:
337 337 cur_repo.user = User.get_by_username(kwargs['user'])
338 338
339 339 if 'repo_group' in kwargs:
340 340 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
341 341 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
342 342
343 343 update_keys = [
344 344 (1, 'repo_description'),
345 345 (1, 'repo_landing_rev'),
346 346 (1, 'repo_private'),
347 347 (1, 'repo_enable_downloads'),
348 348 (1, 'repo_enable_locking'),
349 349 (1, 'repo_enable_statistics'),
350 350 (0, 'clone_uri'),
351 351 (0, 'push_uri'),
352 352 (0, 'fork_id')
353 353 ]
354 354 for strip, k in update_keys:
355 355 if k in kwargs:
356 356 val = kwargs[k]
357 357 if strip:
358 358 k = remove_prefix(k, 'repo_')
359 359
360 360 setattr(cur_repo, k, val)
361 361
362 362 new_name = cur_repo.get_new_name(kwargs['repo_name'])
363 363 cur_repo.repo_name = new_name
364 364
365 365 # if private flag is set, reset default permission to NONE
366 366 if kwargs.get('repo_private'):
367 367 EMPTY_PERM = 'repository.none'
368 368 RepoModel().grant_user_permission(
369 369 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
370 370 )
371 371
372 372 # handle extra fields
373 373 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
374 374 kwargs):
375 375 k = RepositoryField.un_prefix_key(field)
376 376 ex_field = RepositoryField.get_by_key_name(
377 377 key=k, repo=cur_repo)
378 378 if ex_field:
379 379 ex_field.field_value = kwargs[field]
380 380 self.sa.add(ex_field)
381 381 cur_repo.updated_on = datetime.datetime.now()
382 382 self.sa.add(cur_repo)
383 383
384 384 if source_repo_name != new_name:
385 385 # rename repository
386 386 self._rename_filesystem_repo(
387 387 old=source_repo_name, new=new_name)
388 388
389 389 return cur_repo
390 390 except Exception:
391 391 log.error(traceback.format_exc())
392 392 raise
393 393
394 394 def _create_repo(self, repo_name, repo_type, description, owner,
395 395 private=False, clone_uri=None, repo_group=None,
396 396 landing_rev='rev:tip', fork_of=None,
397 397 copy_fork_permissions=False, enable_statistics=False,
398 398 enable_locking=False, enable_downloads=False,
399 399 copy_group_permissions=False,
400 400 state=Repository.STATE_PENDING):
401 401 """
402 402 Create repository inside database with PENDING state; this should only
403 403 be executed by create() repo, with the exception of importing existing
404 404 repos
405 405 """
406 406 from rhodecode.model.scm import ScmModel
407 407
408 408 owner = self._get_user(owner)
409 409 fork_of = self._get_repo(fork_of)
410 410 repo_group = self._get_repo_group(safe_int(repo_group))
411 411
412 412 try:
413 413 repo_name = safe_unicode(repo_name)
414 414 description = safe_unicode(description)
415 415 # repo name is just a name of repository
416 416 # while repo_name_full is a full qualified name that is combined
417 417 # with name and path of group
418 418 repo_name_full = repo_name
419 419 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
420 420
421 421 new_repo = Repository()
422 422 new_repo.repo_state = state
423 423 new_repo.enable_statistics = False
424 424 new_repo.repo_name = repo_name_full
425 425 new_repo.repo_type = repo_type
426 426 new_repo.user = owner
427 427 new_repo.group = repo_group
428 428 new_repo.description = description or repo_name
429 429 new_repo.private = private
430 new_repo.archived = False
430 431 new_repo.clone_uri = clone_uri
431 432 new_repo.landing_rev = landing_rev
432 433
433 434 new_repo.enable_statistics = enable_statistics
434 435 new_repo.enable_locking = enable_locking
435 436 new_repo.enable_downloads = enable_downloads
436 437
437 438 if repo_group:
438 439 new_repo.enable_locking = repo_group.enable_locking
439 440
440 441 if fork_of:
441 442 parent_repo = fork_of
442 443 new_repo.fork = parent_repo
443 444
444 445 events.trigger(events.RepoPreCreateEvent(new_repo))
445 446
446 447 self.sa.add(new_repo)
447 448
448 449 EMPTY_PERM = 'repository.none'
449 450 if fork_of and copy_fork_permissions:
450 451 repo = fork_of
451 452 user_perms = UserRepoToPerm.query() \
452 453 .filter(UserRepoToPerm.repository == repo).all()
453 454 group_perms = UserGroupRepoToPerm.query() \
454 455 .filter(UserGroupRepoToPerm.repository == repo).all()
455 456
456 457 for perm in user_perms:
457 458 UserRepoToPerm.create(
458 459 perm.user, new_repo, perm.permission)
459 460
460 461 for perm in group_perms:
461 462 UserGroupRepoToPerm.create(
462 463 perm.users_group, new_repo, perm.permission)
463 464 # in case we copy permissions and also set this repo to private
464 465 # override the default user permission to make it a private
465 466 # repo
466 467 if private:
467 468 RepoModel(self.sa).grant_user_permission(
468 469 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
469 470
470 471 elif repo_group and copy_group_permissions:
471 472 user_perms = UserRepoGroupToPerm.query() \
472 473 .filter(UserRepoGroupToPerm.group == repo_group).all()
473 474
474 475 group_perms = UserGroupRepoGroupToPerm.query() \
475 476 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
476 477
477 478 for perm in user_perms:
478 479 perm_name = perm.permission.permission_name.replace(
479 480 'group.', 'repository.')
480 481 perm_obj = Permission.get_by_key(perm_name)
481 482 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
482 483
483 484 for perm in group_perms:
484 485 perm_name = perm.permission.permission_name.replace(
485 486 'group.', 'repository.')
486 487 perm_obj = Permission.get_by_key(perm_name)
487 488 UserGroupRepoToPerm.create(
488 489 perm.users_group, new_repo, perm_obj)
489 490
490 491 if private:
491 492 RepoModel(self.sa).grant_user_permission(
492 493 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
493 494
494 495 else:
495 496 perm_obj = self._create_default_perms(new_repo, private)
496 497 self.sa.add(perm_obj)
497 498
498 499 # now automatically start following this repository as owner
499 500 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
500 501 owner.user_id)
501 502
502 503 # we need to flush here, in order to check if database won't
503 504 # throw any exceptions, create filesystem dirs at the very end
504 505 self.sa.flush()
505 506 events.trigger(events.RepoCreateEvent(new_repo))
506 507 return new_repo
507 508
508 509 except Exception:
509 510 log.error(traceback.format_exc())
510 511 raise
511 512
512 513 def create(self, form_data, cur_user):
513 514 """
514 515 Create repository using celery tasks
515 516
516 517 :param form_data:
517 518 :param cur_user:
518 519 """
519 520 from rhodecode.lib.celerylib import tasks, run_task
520 521 return run_task(tasks.create_repo, form_data, cur_user)
521 522
522 523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
523 524 perm_deletions=None, check_perms=True,
524 525 cur_user=None):
525 526 if not perm_additions:
526 527 perm_additions = []
527 528 if not perm_updates:
528 529 perm_updates = []
529 530 if not perm_deletions:
530 531 perm_deletions = []
531 532
532 533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
533 534
534 535 changes = {
535 536 'added': [],
536 537 'updated': [],
537 538 'deleted': []
538 539 }
539 540 # update permissions
540 541 for member_id, perm, member_type in perm_updates:
541 542 member_id = int(member_id)
542 543 if member_type == 'user':
543 544 member_name = User.get(member_id).username
544 545 # this updates also current one if found
545 546 self.grant_user_permission(
546 547 repo=repo, user=member_id, perm=perm)
547 548 elif member_type == 'user_group':
548 549 # check if we have permissions to alter this usergroup
549 550 member_name = UserGroup.get(member_id).users_group_name
550 551 if not check_perms or HasUserGroupPermissionAny(
551 552 *req_perms)(member_name, user=cur_user):
552 553 self.grant_user_group_permission(
553 554 repo=repo, group_name=member_id, perm=perm)
554 555 else:
555 556 raise ValueError("member_type must be 'user' or 'user_group' "
556 557 "got {} instead".format(member_type))
557 558 changes['updated'].append({'type': member_type, 'id': member_id,
558 559 'name': member_name, 'new_perm': perm})
559 560
560 561 # set new permissions
561 562 for member_id, perm, member_type in perm_additions:
562 563 member_id = int(member_id)
563 564 if member_type == 'user':
564 565 member_name = User.get(member_id).username
565 566 self.grant_user_permission(
566 567 repo=repo, user=member_id, perm=perm)
567 568 elif member_type == 'user_group':
568 569 # check if we have permissions to alter this usergroup
569 570 member_name = UserGroup.get(member_id).users_group_name
570 571 if not check_perms or HasUserGroupPermissionAny(
571 572 *req_perms)(member_name, user=cur_user):
572 573 self.grant_user_group_permission(
573 574 repo=repo, group_name=member_id, perm=perm)
574 575 else:
575 576 raise ValueError("member_type must be 'user' or 'user_group' "
576 577 "got {} instead".format(member_type))
577 578
578 579 changes['added'].append({'type': member_type, 'id': member_id,
579 580 'name': member_name, 'new_perm': perm})
580 581 # delete permissions
581 582 for member_id, perm, member_type in perm_deletions:
582 583 member_id = int(member_id)
583 584 if member_type == 'user':
584 585 member_name = User.get(member_id).username
585 586 self.revoke_user_permission(repo=repo, user=member_id)
586 587 elif member_type == 'user_group':
587 588 # check if we have permissions to alter this usergroup
588 589 member_name = UserGroup.get(member_id).users_group_name
589 590 if not check_perms or HasUserGroupPermissionAny(
590 591 *req_perms)(member_name, user=cur_user):
591 592 self.revoke_user_group_permission(
592 593 repo=repo, group_name=member_id)
593 594 else:
594 595 raise ValueError("member_type must be 'user' or 'user_group' "
595 596 "got {} instead".format(member_type))
596 597
597 598 changes['deleted'].append({'type': member_type, 'id': member_id,
598 599 'name': member_name, 'new_perm': perm})
599 600 return changes
600 601
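# Hedged usage sketch (not part of this changeset): update_permissions() takes
# (member_id, perm, member_type) tuples where member_type is 'user' or
# 'user_group'. The repo name, ids and permission keys below are assumptions.
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

changes = RepoModel().update_permissions(
    repo='some-repo',
    perm_additions=[(2, 'repository.write', 'user')],
    perm_updates=[(3, 'repository.read', 'user')],
    perm_deletions=[(4, 'repository.none', 'user')],
    cur_user='admin')
Session().commit()
# changes == {'added': [...], 'updated': [...], 'deleted': [...]}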
601 602 def create_fork(self, form_data, cur_user):
602 603 """
603 604 Simple wrapper into executing celery task for fork creation
604 605
605 606 :param form_data:
606 607 :param cur_user:
607 608 """
608 609 from rhodecode.lib.celerylib import tasks, run_task
609 610 return run_task(tasks.create_repo_fork, form_data, cur_user)
610 611
612 def archive(self, repo):
613 """
614 Archive the given repository by setting its archived flag.
615
616 :param repo:
617 """
618 repo = self._get_repo(repo)
619 if repo:
620
621 try:
622 repo.archived = True
623 self.sa.add(repo)
624 self.sa.commit()
625 except Exception:
626 log.error(traceback.format_exc())
627 raise
628
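# Hedged sketch of the new archive() call introduced by this commit; the repo
# can be passed as a name, id, or Repository instance (resolved via _get_repo).
from rhodecode.model.repo import RepoModel

RepoModel().archive('some-repo')  # sets archived=True and commits the session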
611 629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
612 630 """
613 631 Delete given repository. The forks parameter defines what to do with
614 632 attached forks; raises AttachedForksError if the deleted repo has attached
615 633 forks, and AttachedPullRequestsError if it has attached pull requests.
616 634
617 635 :param repo:
618 636 :param forks: str 'delete' or 'detach'
637 :param pull_requests: str 'delete' or None
619 638 :param fs_remove: remove (archive) the repo from the filesystem
620 639 """
621 640 if not cur_user:
622 641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
623 642 repo = self._get_repo(repo)
624 643 if repo:
625 644 if forks == 'detach':
626 645 for r in repo.forks:
627 646 r.fork = None
628 647 self.sa.add(r)
629 648 elif forks == 'delete':
630 649 for r in repo.forks:
631 650 self.delete(r, forks='delete')
632 651 elif [f for f in repo.forks]:
633 652 raise AttachedForksError()
634 653
635 654 # check for pull requests
636 655 pr_sources = repo.pull_requests_source
637 656 pr_targets = repo.pull_requests_target
638 657 if pull_requests != 'delete' and (pr_sources or pr_targets):
639 658 raise AttachedPullRequestsError()
640 659
641 660 old_repo_dict = repo.get_dict()
642 661 events.trigger(events.RepoPreDeleteEvent(repo))
643 662 try:
644 663 self.sa.delete(repo)
645 664 if fs_remove:
646 665 self._delete_filesystem_repo(repo)
647 666 else:
648 667 log.debug('skipping removal from filesystem')
649 668 old_repo_dict.update({
650 669 'deleted_by': cur_user,
651 670 'deleted_on': time.time(),
652 671 })
653 672 log_delete_repository(**old_repo_dict)
654 673 events.trigger(events.RepoDeleteEvent(repo))
655 674 except Exception:
656 675 log.error(traceback.format_exc())
657 676 raise
658 677
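# Hedged sketch of delete() (repo name assumed): forks='detach' unlinks forks,
# forks='delete' removes them recursively; pull_requests='delete' is required
# when attached pull requests exist, otherwise AttachedPullRequestsError is raised.
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

RepoModel().delete('some-repo', forks='detach', pull_requests='delete')
Session().commit()  # delete() does not commit the session itself, unlike archive()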
659 678 def grant_user_permission(self, repo, user, perm):
660 679 """
661 680 Grant permission for user on given repository, or update existing one
662 681 if found
663 682
664 683 :param repo: Instance of Repository, repository_id, or repository name
665 684 :param user: Instance of User, user_id or username
666 685 :param perm: Instance of Permission, or permission_name
667 686 """
668 687 user = self._get_user(user)
669 688 repo = self._get_repo(repo)
670 689 permission = self._get_perm(perm)
671 690
672 691 # check if we have that permission already
673 692 obj = self.sa.query(UserRepoToPerm) \
674 693 .filter(UserRepoToPerm.user == user) \
675 694 .filter(UserRepoToPerm.repository == repo) \
676 695 .scalar()
677 696 if obj is None:
678 697 # create new !
679 698 obj = UserRepoToPerm()
680 699 obj.repository = repo
681 700 obj.user = user
682 701 obj.permission = permission
683 702 self.sa.add(obj)
684 703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
685 704 action_logger_generic(
686 705 'granted permission: {} to user: {} on repo: {}'.format(
687 706 perm, user, repo), namespace='security.repo')
688 707 return obj
689 708
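# Hedged sketch: grant (or update) a repository permission for a single user.
# The username and permission key are assumptions; keys follow the
# 'repository.none/read/write/admin' scheme.
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

RepoModel().grant_user_permission(
    repo='some-repo', user='john', perm='repository.write')
Session().commit()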
690 709 def revoke_user_permission(self, repo, user):
691 710 """
692 711 Revoke permission for user on given repository
693 712
694 713 :param repo: Instance of Repository, repository_id, or repository name
695 714 :param user: Instance of User, user_id or username
696 715 """
697 716
698 717 user = self._get_user(user)
699 718 repo = self._get_repo(repo)
700 719
701 720 obj = self.sa.query(UserRepoToPerm) \
702 721 .filter(UserRepoToPerm.repository == repo) \
703 722 .filter(UserRepoToPerm.user == user) \
704 723 .scalar()
705 724 if obj:
706 725 self.sa.delete(obj)
707 726 log.debug('Revoked perm on %s on %s', repo, user)
708 727 action_logger_generic(
709 728 'revoked permission from user: {} on repo: {}'.format(
710 729 user, repo), namespace='security.repo')
711 730
712 731 def grant_user_group_permission(self, repo, group_name, perm):
713 732 """
714 733 Grant permission for user group on given repository, or update
715 734 existing one if found
716 735
717 736 :param repo: Instance of Repository, repository_id, or repository name
718 737 :param group_name: Instance of UserGroup, users_group_id,
719 738 or user group name
720 739 :param perm: Instance of Permission, or permission_name
721 740 """
722 741 repo = self._get_repo(repo)
723 742 group_name = self._get_user_group(group_name)
724 743 permission = self._get_perm(perm)
725 744
726 745 # check if we have that permission already
727 746 obj = self.sa.query(UserGroupRepoToPerm) \
728 747 .filter(UserGroupRepoToPerm.users_group == group_name) \
729 748 .filter(UserGroupRepoToPerm.repository == repo) \
730 749 .scalar()
731 750
732 751 if obj is None:
733 752 # create new
734 753 obj = UserGroupRepoToPerm()
735 754
736 755 obj.repository = repo
737 756 obj.users_group = group_name
738 757 obj.permission = permission
739 758 self.sa.add(obj)
740 759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
741 760 action_logger_generic(
742 761 'granted permission: {} to usergroup: {} on repo: {}'.format(
743 762 perm, group_name, repo), namespace='security.repo')
744 763
745 764 return obj
746 765
747 766 def revoke_user_group_permission(self, repo, group_name):
748 767 """
749 768 Revoke permission for user group on given repository
750 769
751 770 :param repo: Instance of Repository, repository_id, or repository name
752 771 :param group_name: Instance of UserGroup, users_group_id,
753 772 or user group name
754 773 """
755 774 repo = self._get_repo(repo)
756 775 group_name = self._get_user_group(group_name)
757 776
758 777 obj = self.sa.query(UserGroupRepoToPerm) \
759 778 .filter(UserGroupRepoToPerm.repository == repo) \
760 779 .filter(UserGroupRepoToPerm.users_group == group_name) \
761 780 .scalar()
762 781 if obj:
763 782 self.sa.delete(obj)
764 783 log.debug('Revoked perm to %s on %s', repo, group_name)
765 784 action_logger_generic(
766 785 'revoked permission from usergroup: {} on repo: {}'.format(
767 786 group_name, repo), namespace='security.repo')
768 787
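# Hedged sketch for the user-group variants above (group name assumed):
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

model = RepoModel()
model.grant_user_group_permission(
    repo='some-repo', group_name='developers', perm='repository.write')
model.revoke_user_group_permission(repo='some-repo', group_name='developers')
Session().commit()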
769 788 def delete_stats(self, repo_name):
770 789 """
771 790 removes stats for given repo
772 791
773 792 :param repo_name:
774 793 """
775 794 repo = self._get_repo(repo_name)
776 795 try:
777 796 obj = self.sa.query(Statistics) \
778 797 .filter(Statistics.repository == repo).scalar()
779 798 if obj:
780 799 self.sa.delete(obj)
781 800 except Exception:
782 801 log.error(traceback.format_exc())
783 802 raise
784 803
785 804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
786 805 field_type='str', field_desc=''):
787 806
788 807 repo = self._get_repo(repo_name)
789 808
790 809 new_field = RepositoryField()
791 810 new_field.repository = repo
792 811 new_field.field_key = field_key
793 812 new_field.field_type = field_type # python type
794 813 new_field.field_value = field_value
795 814 new_field.field_desc = field_desc
796 815 new_field.field_label = field_label
797 816 self.sa.add(new_field)
798 817 return new_field
799 818
800 819 def delete_repo_field(self, repo_name, field_key):
801 820 repo = self._get_repo(repo_name)
802 821 field = RepositoryField.get_by_key_name(field_key, repo)
803 822 if field:
804 823 self.sa.delete(field)
805 824
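# Hedged sketch of extra repository fields (key/label/value are assumptions);
# field_type is stored as a string naming the python type ('str' by default).
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

model = RepoModel()
model.add_repo_field(
    'some-repo', field_key='ticket_url', field_label='Ticket URL',
    field_value='https://tracker.example.com', field_desc='External tracker link')
Session().commit()

model.delete_repo_field('some-repo', field_key='ticket_url')
Session().commit()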
806 825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
807 826 clone_uri=None, repo_store_location=None,
808 827 use_global_config=False):
809 828 """
810 829 makes repository on filesystem. It is group aware, meaning it will create
811 830 the repository within a group and alter the paths according to the
812 831 group location
813 832
814 833 :param repo_name:
815 834 :param alias:
816 835 :param parent:
817 836 :param clone_uri:
818 837 :param repo_store_location:
819 838 """
820 839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
821 840 from rhodecode.model.scm import ScmModel
822 841
823 842 if Repository.NAME_SEP in repo_name:
824 843 raise ValueError(
825 844 'repo_name must not contain groups got `%s`' % repo_name)
826 845
827 846 if isinstance(repo_group, RepoGroup):
828 847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
829 848 else:
830 849 new_parent_path = repo_group or ''
831 850
832 851 if repo_store_location:
833 852 _paths = [repo_store_location]
834 853 else:
835 854 _paths = [self.repos_path, new_parent_path, repo_name]
836 855 # we need to make it str for mercurial
837 856 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
838 857
839 858 # check if this path is not a repository
840 859 if is_valid_repo(repo_path, self.repos_path):
841 860 raise Exception('This path %s is a valid repository' % repo_path)
842 861
843 862 # check if this path is a group
844 863 if is_valid_repo_group(repo_path, self.repos_path):
845 864 raise Exception('This path %s is a valid group' % repo_path)
846 865
847 866 log.info('creating repo %s in %s from url: `%s`',
848 867 repo_name, safe_unicode(repo_path),
849 868 obfuscate_url_pw(clone_uri))
850 869
851 870 backend = get_backend(repo_type)
852 871
853 872 config_repo = None if use_global_config else repo_name
854 873 if config_repo and new_parent_path:
855 874 config_repo = Repository.NAME_SEP.join(
856 875 (new_parent_path, config_repo))
857 876 config = make_db_config(clear_session=False, repo=config_repo)
858 877 config.set('extensions', 'largefiles', '')
859 878
860 879 # patch and reset hooks section of UI config to not run any
861 880 # hooks on creating remote repo
862 881 config.clear_section('hooks')
863 882
864 883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
865 884 if repo_type == 'git':
866 885 repo = backend(
867 886 repo_path, config=config, create=True, src_url=clone_uri,
868 887 bare=True)
869 888 else:
870 889 repo = backend(
871 890 repo_path, config=config, create=True, src_url=clone_uri)
872 891
873 892 repo.install_hooks()
874 893
875 894 log.debug('Created repo %s with %s backend',
876 895 safe_unicode(repo_name), safe_unicode(repo_type))
877 896 return repo
878 897
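# Illustration of the path composition performed above (all values assumed):
# a repository created inside a group lands under
# <repos_path>/<group full path>/<repo_name>.
import os

repos_path = '/var/opt/rhodecode_repo_store'   # assumed storage root
new_parent_path = os.sep.join(['projects', 'backend'])
repo_path = os.path.join(repos_path, new_parent_path, 'some-repo')
# repo_path == '/var/opt/rhodecode_repo_store/projects/backend/some-repo'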
879 898 def _rename_filesystem_repo(self, old, new):
880 899 """
881 900 renames repository on filesystem
882 901
883 902 :param old: old name
884 903 :param new: new name
885 904 """
886 905 log.info('renaming repo from %s to %s', old, new)
887 906
888 907 old_path = os.path.join(self.repos_path, old)
889 908 new_path = os.path.join(self.repos_path, new)
890 909 if os.path.isdir(new_path):
891 910 raise Exception(
892 911 'Was trying to rename to already existing dir %s' % new_path
893 912 )
894 913 shutil.move(old_path, new_path)
895 914
896 915 def _delete_filesystem_repo(self, repo):
897 916 """
898 917 removes repo from filesystem; the removal is actually made by
899 918 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
900 919 repository is no longer valid for rhodecode. It can be undeleted later on
901 920 by reverting the renames on this repository
902 921
903 922 :param repo: repo object
904 923 """
905 924 rm_path = os.path.join(self.repos_path, repo.repo_name)
906 925 repo_group = repo.group
907 926 log.info("Removing repository %s", rm_path)
908 927 # disable hg/git internals so it doesn't get detected as a repo
909 928 alias = repo.repo_type
910 929
911 930 config = make_db_config(clear_session=False)
912 931 config.set('extensions', 'largefiles', '')
913 932 bare = getattr(repo.scm_instance(config=config), 'bare', False)
914 933
915 934 # skip this for bare git repos
916 935 if not bare:
917 936 # disable VCS repo
918 937 vcs_path = os.path.join(rm_path, '.%s' % alias)
919 938 if os.path.exists(vcs_path):
920 939 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
921 940
922 941 _now = datetime.datetime.now()
923 942 _ms = str(_now.microsecond).rjust(6, '0')
924 943 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
925 944 repo.just_name)
926 945 if repo_group:
927 946 # if repository is in group, prefix the removal path with the group
928 947 args = repo_group.full_path_splitted + [_d]
929 948 _d = os.path.join(*args)
930 949
931 950 if os.path.isdir(rm_path):
932 951 shutil.move(rm_path, os.path.join(self.repos_path, _d))
933 952
934 953 # finally cleanup diff-cache if it exists
935 954 cached_diffs_dir = repo.cached_diffs_dir
936 955 if os.path.isdir(cached_diffs_dir):
937 956 shutil.rmtree(cached_diffs_dir)
938 957
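# Illustration of the rm__ prefix built above (timestamp and repo name assumed);
# the directory is renamed rather than deleted so the operation can be reverted.
import datetime

_now = datetime.datetime(2018, 10, 28, 10, 15, 30, 123)
_ms = str(_now.microsecond).rjust(6, '0')
print('rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'some-repo'))
# -> rm__20181028_101530_000123__some-repo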
939 958
940 959 class ReadmeFinder:
941 960 """
942 961 Utility which knows how to find a readme for a specific commit.
943 962
944 963 The main idea is that this is a configurable algorithm. When creating an
945 964 instance you can define parameters, currently only the `default_renderer`.
946 965 Based on this configuration the method :meth:`search` behaves slightly
947 966 differently.
948 967 """
949 968
950 969 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
951 970 path_re = re.compile(r'^docs?', re.IGNORECASE)
952 971
953 972 default_priorities = {
954 973 None: 0,
955 974 '.text': 2,
956 975 '.txt': 3,
957 976 '.rst': 1,
958 977 '.rest': 2,
959 978 '.md': 1,
960 979 '.mkdn': 2,
961 980 '.mdown': 3,
962 981 '.markdown': 4,
963 982 }
964 983
965 984 path_priority = {
966 985 'doc': 0,
967 986 'docs': 1,
968 987 }
969 988
970 989 FALLBACK_PRIORITY = 99
971 990
972 991 RENDERER_TO_EXTENSION = {
973 992 'rst': ['.rst', '.rest'],
974 993 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
975 994 }
976 995
977 996 def __init__(self, default_renderer=None):
978 997 self._default_renderer = default_renderer
979 998 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
980 999 default_renderer, [])
981 1000
982 1001 def search(self, commit, path='/'):
983 1002 """
984 1003 Find a readme in the given `commit`.
985 1004 """
986 1005 nodes = commit.get_nodes(path)
987 1006 matches = self._match_readmes(nodes)
988 1007 matches = self._sort_according_to_priority(matches)
989 1008 if matches:
990 1009 return matches[0].node
991 1010
992 1011 paths = self._match_paths(nodes)
993 1012 paths = self._sort_paths_according_to_priority(paths)
994 1013 for path in paths:
995 1014 match = self.search(commit, path=path)
996 1015 if match:
997 1016 return match
998 1017
999 1018 return None
1000 1019
1001 1020 def _match_readmes(self, nodes):
1002 1021 for node in nodes:
1003 1022 if not node.is_file():
1004 1023 continue
1005 1024 path = node.path.rsplit('/', 1)[-1]
1006 1025 match = self.readme_re.match(path)
1007 1026 if match:
1008 1027 extension = match.group(1)
1009 1028 yield ReadmeMatch(node, match, self._priority(extension))
1010 1029
1011 1030 def _match_paths(self, nodes):
1012 1031 for node in nodes:
1013 1032 if not node.is_dir():
1014 1033 continue
1015 1034 match = self.path_re.match(node.path)
1016 1035 if match:
1017 1036 yield node.path
1018 1037
1019 1038 def _priority(self, extension):
1020 1039 renderer_priority = (
1021 1040 0 if extension in self._renderer_extensions else 1)
1022 1041 extension_priority = self.default_priorities.get(
1023 1042 extension, self.FALLBACK_PRIORITY)
1024 1043 return (renderer_priority, extension_priority)
1025 1044
1026 1045 def _sort_according_to_priority(self, matches):
1027 1046
1028 1047 def priority_and_path(match):
1029 1048 return (match.priority, match.path)
1030 1049
1031 1050 return sorted(matches, key=priority_and_path)
1032 1051
1033 1052 def _sort_paths_according_to_priority(self, paths):
1034 1053
1035 1054 def priority_and_path(path):
1036 1055 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1037 1056
1038 1057 return sorted(paths, key=priority_and_path)
1039 1058
1040 1059
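# Hedged sketch of ReadmeFinder (repo name and helper calls are assumptions):
# preferring the markdown renderer ranks .md/.markdown readmes first.
from rhodecode.model.repo import RepoModel

repo = RepoModel().get_by_repo_name('some-repo')
commit = repo.scm_instance().get_commit()  # tip commit, assumed helper
readme_node = ReadmeFinder(default_renderer='markdown').search(commit)
if readme_node is not None:
    print(readme_node.path)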
1041 1060 class ReadmeMatch:
1042 1061
1043 1062 def __init__(self, node, match, priority):
1044 1063 self.node = node
1045 1064 self._match = match
1046 1065 self.priority = priority
1047 1066
1048 1067 @property
1049 1068 def path(self):
1050 1069 return self.node.path
1051 1070
1052 1071 def __repr__(self):
1053 1072 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,335 +1,336 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 15 pyroutes.register('favicon', '/favicon.ico', []);
16 16 pyroutes.register('robots', '/robots.txt', []);
17 17 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
18 18 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
19 19 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
20 20 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
21 21 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
22 22 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
23 23 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
24 24 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
25 25 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
26 26 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
27 27 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
28 28 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
29 29 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
30 30 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
31 31 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
32 32 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
33 33 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
34 34 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
35 35 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
36 36 pyroutes.register('ops_ping_legacy', '/_admin/ping', []);
37 37 pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []);
38 38 pyroutes.register('admin_home', '/_admin', []);
39 39 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
40 40 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
41 41 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
42 42 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
43 43 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
44 44 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
45 45 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
46 46 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
47 47 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
48 48 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
49 49 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []);
50 50 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
51 51 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
52 52 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
53 53 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
54 54 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
55 55 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
56 56 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
57 57 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
58 58 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
59 59 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
60 60 pyroutes.register('admin_settings', '/_admin/settings', []);
61 61 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
62 62 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
63 63 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
64 64 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
65 65 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
66 66 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
67 67 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
68 68 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
69 69 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
70 70 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
71 71 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
72 72 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
73 73 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
74 74 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
75 75 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
76 76 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
77 77 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
78 78 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
79 79 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
80 80 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
81 81 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
82 82 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
83 83 pyroutes.register('admin_settings_automation', '/_admin/_admin/settings/automation', []);
84 84 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
85 85 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
86 86 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
87 87 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
88 88 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
89 89 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
90 90 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
91 91 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
92 92 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
93 93 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
94 94 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
95 95 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
96 96 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
97 97 pyroutes.register('users', '/_admin/users', []);
98 98 pyroutes.register('users_data', '/_admin/users_data', []);
99 99 pyroutes.register('users_create', '/_admin/users/create', []);
100 100 pyroutes.register('users_new', '/_admin/users/new', []);
101 101 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
102 102 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
103 103 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
104 104 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
105 105 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
106 106 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
107 107 pyroutes.register('user_force_password_reset', '/_admin/users/%(user_id)s/password_reset', ['user_id']);
108 108 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
109 109 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
110 110 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
111 111 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
112 112 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
113 113 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
114 114 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
115 115 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
116 116 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
117 117 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
118 118 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
119 119 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
120 120 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
121 121 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
122 122 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
123 123 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
124 124 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
125 125 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
126 126 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
127 127 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
128 128 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
129 129 pyroutes.register('user_groups', '/_admin/user_groups', []);
130 130 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
131 131 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
132 132 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
133 133 pyroutes.register('repos', '/_admin/repos', []);
134 134 pyroutes.register('repo_new', '/_admin/repos/new', []);
135 135 pyroutes.register('repo_create', '/_admin/repos/create', []);
136 136 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
137 137 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
138 138 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
139 139 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
140 140 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
141 141 pyroutes.register('channelstream_proxy', '/_channelstream', []);
142 142 pyroutes.register('login', '/_admin/login', []);
143 143 pyroutes.register('logout', '/_admin/logout', []);
144 144 pyroutes.register('register', '/_admin/register', []);
145 145 pyroutes.register('reset_password', '/_admin/password_reset', []);
146 146 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
147 147 pyroutes.register('home', '/', []);
148 148 pyroutes.register('user_autocomplete_data', '/_users', []);
149 149 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
150 150 pyroutes.register('repo_list_data', '/_repos', []);
151 151 pyroutes.register('goto_switcher_data', '/_goto_data', []);
152 152 pyroutes.register('markup_preview', '/_markup_preview', []);
153 153 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
154 154 pyroutes.register('journal', '/_admin/journal', []);
155 155 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
156 156 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
157 157 pyroutes.register('journal_public', '/_admin/public_journal', []);
158 158 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
159 159 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
160 160 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
161 161 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
162 162 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
163 163 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
164 164 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
165 165 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
166 166 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
167 167 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
168 168 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
169 169 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
170 170 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
171 171 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
172 172 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
173 173 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
174 174 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
175 175 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
176 176 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
177 177 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
178 178 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
179 179 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
180 180 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
181 181 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
182 182 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
183 183 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
184 184 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
185 185 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
186 186 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
187 187 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
188 188 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
189 189 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
190 190 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
191 191 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
192 192 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
193 193 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
194 194 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
195 195 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
196 196 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
197 197 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
198 198 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
199 199 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
200 200 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
201 201 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
202 202 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
203 203 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
204 204 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
205 205 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
206 206 pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']);
207 207 pyroutes.register('repo_changelog_elements_file', '/%(repo_name)s/changelog_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
208 208 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
209 209 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
210 210 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
211 211 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
212 212 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
213 213 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
214 214 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
215 215 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
216 216 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
217 217 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
218 218 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
219 219 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
220 220 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
221 221 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
222 222 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
223 223 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
224 224 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
225 225 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
226 226 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
227 227 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
228 228 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
229 229 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
230 230 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
231 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
231 232 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
232 233 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
233 234 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
234 235 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
235 236 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
236 237 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
237 238 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
238 239 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
239 240 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
240 241 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
241 242 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
242 243 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
243 244 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
244 245 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
245 246 pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
246 247 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
247 248 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
248 249 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
249 250 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
250 251 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
251 252 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
252 253 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
253 254 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
254 255 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
255 256 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
256 257 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
257 258 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
258 259 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
259 260 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
260 261 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
261 262 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
262 263 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
263 264 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
264 265 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
265 266 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']);
266 267 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']);
267 268 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
268 269 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
269 270 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
270 271 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
271 272 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
272 273 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
273 274 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
274 275 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
275 276 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
276 277 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
277 278 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
278 279 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
279 280 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
280 281 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
281 282 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
282 283 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
283 284 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
284 285 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
285 286 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
286 287 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
287 288 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
288 289 pyroutes.register('search', '/_admin/search', []);
289 290 pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']);
290 291 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
291 292 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
292 293 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
293 294 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
294 295 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
295 296 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
296 297 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
297 298 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
298 299 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
299 300 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
300 301 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
301 302 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
302 303 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
303 304 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
304 305 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
305 306 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
306 307 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
307 308 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
308 309 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
309 310 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
310 311 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
311 312 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
312 313 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
313 314 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
314 315 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
315 316 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
316 317 pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []);
317 318 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
318 319 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
319 320 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
320 321 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
321 322 pyroutes.register('gists_show', '/_admin/gists', []);
322 323 pyroutes.register('gists_new', '/_admin/gists/new', []);
323 324 pyroutes.register('gists_create', '/_admin/gists/create', []);
324 325 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
325 326 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
326 327 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
327 328 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
328 329 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
329 330 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']);
330 331 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
331 332 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
332 333 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
333 334 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
334 335 pyroutes.register('apiv2', '/_admin/api', []);
335 336 }