feat(branch removal through UI): Added ability to remove branches through UI from git and hg repositories. Fixes: RCCE-75
ilin.s -
r5428:fc536dab default
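This commit registers a new POST-only `branch_remove` route (see the `# Branches` block in the routes file below), wired to `RepoBranchesView.remove_branch` and limited to hg/git repositories via the `repo_accepted_types` predicate. The view body itself is not part of this diff, so the sketch below only shows how a URL for the new route could be generated with the existing `h.route_path` helper; the repository and branch names are placeholders, not values from this change.

```python
# Minimal sketch: building the URL for the new 'branch_remove' route.
# Assumes a RhodeCode request context where rhodecode.lib.helpers is usable;
# the repo/branch names are illustrative placeholders.
from rhodecode.lib import helpers as h

def branch_remove_url(repo_name, branch_name):
    # Pattern added below: '/{repo_name:.*?[^/]}/{branch_name:.*?[^/]}/remove'
    # e.g. branch_remove_url('my-group/my-repo', 'feature-x')
    #      -> '/my-group/my-repo/feature-x/remove'  (must be requested via POST)
    return h.route_path('branch_remove', repo_name=repo_name, branch_name=branch_name)
```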
@@ -1,986 +1,987 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import time
20 20 import logging
21 21 import operator
22 22
23 23 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
24 24
25 25 from rhodecode.lib import helpers as h, diffs, rc_cache
26 26 from rhodecode.lib.str_utils import safe_str
27 27 from rhodecode.lib.utils import repo_name_slug
28 28 from rhodecode.lib.utils2 import (
29 29 StrictAttributeDict,
30 30 str2bool,
31 31 safe_int,
32 32 datetime_to_time,
33 33 )
34 34 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 36 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
37 37 from rhodecode.model import repo
38 38 from rhodecode.model import repo_group
39 39 from rhodecode.model import user_group
40 40 from rhodecode.model import user
41 41 from rhodecode.model.db import User
42 42 from rhodecode.model.scm import ScmModel
43 43 from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel
44 44 from rhodecode.model.repo import ReadmeFinder
45 45
46 46 log = logging.getLogger(__name__)
47 47
48 48
49 49 ADMIN_PREFIX: str = "/_admin"
50 50 STATIC_FILE_PREFIX: str = "/_static"
51 51
52 52 URL_NAME_REQUIREMENTS = {
53 53 # group names can have a slash in them, but they must not end with a slash
54 54 "group_name": r".*?[^/]",
55 55 "repo_group_name": r".*?[^/]",
56 56 # repo names can have a slash in them, but they must not end with a slash
57 57 "repo_name": r".*?[^/]",
58 58 # file path eats up everything at the end
59 59 "f_path": r".*",
60 60 # reference types
61 61 "source_ref_type": r"(branch|book|tag|rev|\%\(source_ref_type\)s)",
62 62 "target_ref_type": r"(branch|book|tag|rev|\%\(target_ref_type\)s)",
63 63 }
64 64
65 65
66 66 def add_route_with_slash(config, name, pattern, **kw):
67 67 config.add_route(name, pattern, **kw)
68 68 if not pattern.endswith("/"):
69 69 config.add_route(name + "_slash", pattern + "/", **kw)
70 70
71 71
72 72 def add_route_requirements(route_path, requirements=None):
73 73 """
74 74 Adds regex requirements to pyramid routes using a mapping dict
75 75 e.g::
76 76 add_route_requirements('{repo_name}/settings')
77 77 """
78 78 requirements = requirements or URL_NAME_REQUIREMENTS
79 79 for key, regex in list(requirements.items()):
80 80 route_path = route_path.replace("{%s}" % key, "{%s:%s}" % (key, regex))
81 81 return route_path
82 82
83 83
84 84 def get_format_ref_id(repo):
85 85 """Returns a `repo` specific reference formatter function"""
86 86 if h.is_svn(repo):
87 87 return _format_ref_id_svn
88 88 else:
89 89 return _format_ref_id
90 90
91 91
92 92 def _format_ref_id(name, raw_id):
93 93 """Default formatting of a given reference `name`"""
94 94 return name
95 95
96 96
97 97 def _format_ref_id_svn(name, raw_id):
98 98 """Special way of formatting a reference for Subversion including path"""
99 99 return f"{name}@{raw_id}"
100 100
101 101
102 102 class TemplateArgs(StrictAttributeDict):
103 103 pass
104 104
105 105
106 106 class BaseAppView(object):
107 107 DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
108 108 EXTRA_VIEWS_TO_IGNORE = ['login', 'register', 'logout']
109 109 SETUP_2FA_VIEW = 'setup_2fa'
110 110 VERIFY_2FA_VIEW = 'check_2fa'
111 111
112 112 def __init__(self, context, request):
113 113 self.request = request
114 114 self.context = context
115 115 self.session = request.session
116 116 if not hasattr(request, "user"):
117 117 # NOTE(marcink): edge case, we ended up in a matched route
118 118 # but probably outside of web-app context, e.g. API CALL/VCS CALL
119 119 if hasattr(request, "vcs_call") or hasattr(request, "rpc_method"):
120 120 log.warning("Unable to process request `%s` in this scope", request)
121 121 raise HTTPBadRequest()
122 122
123 123 self._rhodecode_user = request.user # auth user
124 124 self._rhodecode_db_user = self._rhodecode_user.get_instance()
125 125 self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
126 126 self._maybe_needs_password_change(
127 127 request.matched_route.name, self._rhodecode_db_user
128 128 )
129 129 self._maybe_needs_2fa_configuration(
130 130 request.matched_route.name, self._rhodecode_db_user
131 131 )
132 132 self._maybe_needs_2fa_check(
133 133 request.matched_route.name, self._rhodecode_db_user
134 134 )
135 135
136 136 def _maybe_needs_password_change(self, view_name, user_obj):
137 137 if view_name in self.DONT_CHECKOUT_VIEWS:
138 138 return
139 139
140 140 log.debug(
141 141 "Checking if user %s needs password change on view %s", user_obj, view_name
142 142 )
143 143
144 144 skip_user_views = [
145 145 "logout",
146 146 "login",
147 147 "check_2fa",
148 148 "my_account_password",
149 149 "my_account_password_update",
150 150 ]
151 151
152 152 if not user_obj:
153 153 return
154 154
155 155 if user_obj.username == User.DEFAULT_USER:
156 156 return
157 157
158 158 now = time.time()
159 159 should_change = self.user_data.get("force_password_change")
160 160 change_after = safe_int(should_change) or 0
161 161 if should_change and now > change_after:
162 162 log.debug("User %s requires password change", user_obj)
163 163 h.flash(
164 164 "You are required to change your password",
165 165 "warning",
166 166 ignore_duplicate=True,
167 167 )
168 168
169 169 if view_name not in skip_user_views:
170 170 raise HTTPFound(self.request.route_path("my_account_password"))
171 171
172 172 def _maybe_needs_2fa_configuration(self, view_name, user_obj):
173 173 if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
174 174 return
175 175
176 176 if not user_obj:
177 177 return
178 178
179 179 if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
180 180 h.flash(
181 181 "You are required to configure 2FA",
182 182 "warning",
183 183 ignore_duplicate=False,
184 184 )
185 185 # Special case for users created "on the fly" (ldap case for new user)
186 186 user_obj.check_2fa_required = False
187 187 raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))
188 188
189 189 def _maybe_needs_2fa_check(self, view_name, user_obj):
190 190 if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
191 191 return
192 192
193 193 if not user_obj:
194 194 return
195 195
196 196 if user_obj.check_2fa_required and view_name != self.VERIFY_2FA_VIEW:
197 197 raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))
198 198
199 199 def _log_creation_exception(self, e, repo_name):
200 200 _ = self.request.translate
201 201 reason = None
202 202 if len(e.args) == 2:
203 203 reason = e.args[1]
204 204
205 205 if reason == "INVALID_CERTIFICATE":
206 206 log.exception("Exception creating a repository: invalid certificate")
207 207 msg = _("Error creating repository %s: invalid certificate") % repo_name
208 208 else:
209 209 log.exception("Exception creating a repository")
210 210 msg = _("Error creating repository %s") % repo_name
211 211 return msg
212 212
213 213 def _get_local_tmpl_context(self, include_app_defaults=True):
214 214 c = TemplateArgs()
215 215 c.auth_user = self.request.user
216 216 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
217 217 c.rhodecode_user = self.request.user
218 218
219 219 if include_app_defaults:
220 220 from rhodecode.lib.base import attach_context_attributes
221 221
222 222 attach_context_attributes(c, self.request, self.request.user.user_id)
223 223
224 224 c.is_super_admin = c.auth_user.is_admin
225 225
226 226 c.can_create_repo = c.is_super_admin
227 227 c.can_create_repo_group = c.is_super_admin
228 228 c.can_create_user_group = c.is_super_admin
229 229
230 230 c.is_delegated_admin = False
231 231
232 232 if not c.auth_user.is_default and not c.is_super_admin:
233 233 c.can_create_repo = h.HasPermissionAny("hg.create.repository")(
234 234 user=self.request.user
235 235 )
236 236 repositories = c.auth_user.repositories_admin or c.can_create_repo
237 237
238 238 c.can_create_repo_group = h.HasPermissionAny("hg.repogroup.create.true")(
239 239 user=self.request.user
240 240 )
241 241 repository_groups = (
242 242 c.auth_user.repository_groups_admin or c.can_create_repo_group
243 243 )
244 244
245 245 c.can_create_user_group = h.HasPermissionAny("hg.usergroup.create.true")(
246 246 user=self.request.user
247 247 )
248 248 user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
249 249 # delegated admin can create, or manage some objects
250 250 c.is_delegated_admin = repositories or repository_groups or user_groups
251 251 return c
252 252
253 253 def _get_template_context(self, tmpl_args, **kwargs):
254 254 local_tmpl_args = {"defaults": {}, "errors": {}, "c": tmpl_args}
255 255 local_tmpl_args.update(kwargs)
256 256 return local_tmpl_args
257 257
258 258 def load_default_context(self):
259 259 """
260 260 example:
261 261
262 262 def load_default_context(self):
263 263 c = self._get_local_tmpl_context()
264 264 c.custom_var = 'foobar'
265 265
266 266 return c
267 267 """
268 268 raise NotImplementedError("Needs implementation in view class")
269 269
270 270
271 271 class RepoAppView(BaseAppView):
272 272 def __init__(self, context, request):
273 273 super().__init__(context, request)
274 274 self.db_repo = request.db_repo
275 275 self.db_repo_name = self.db_repo.repo_name
276 276 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
277 277 self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
278 278 self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)
279 279
280 280 def _handle_missing_requirements(self, error):
281 281 log.error(
282 282 "Requirements are missing for repository %s: %s",
283 283 self.db_repo_name,
284 284 safe_str(error),
285 285 )
286 286
287 287 def _prepare_and_set_clone_url(self, c):
288 288 username = ""
289 289 if self._rhodecode_user.username != User.DEFAULT_USER:
290 290 username = self._rhodecode_user.username
291 291
292 292 _def_clone_uri = c.clone_uri_tmpl
293 293 _def_clone_uri_id = c.clone_uri_id_tmpl
294 294 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
295 295
296 296 c.clone_repo_url = self.db_repo.clone_url(
297 297 user=username, uri_tmpl=_def_clone_uri
298 298 )
299 299 c.clone_repo_url_id = self.db_repo.clone_url(
300 300 user=username, uri_tmpl=_def_clone_uri_id
301 301 )
302 302 c.clone_repo_url_ssh = self.db_repo.clone_url(
303 303 uri_tmpl=_def_clone_uri_ssh, ssh=True
304 304 )
305 305
306 306 def _get_local_tmpl_context(self, include_app_defaults=True):
307 307 _ = self.request.translate
308 308 c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
309 309
310 310 # register common vars for this type of view
311 311 c.rhodecode_db_repo = self.db_repo
312 312 c.repo_name = self.db_repo_name
313 313 c.repository_pull_requests = self.db_repo_pull_requests
314 314 c.repository_artifacts = self.db_repo_artifacts
315 315 c.repository_is_user_following = ScmModel().is_following_repo(
316 316 self.db_repo_name, self._rhodecode_user.user_id
317 317 )
318 318 self.path_filter = PathFilter(None)
319 319
320 320 c.repository_requirements_missing = {}
321 321 try:
322 322 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
323 323 # NOTE(marcink):
324 324 # comparison to None since if it's an object __bool__ is expensive to
325 325 # calculate
326 326 if self.rhodecode_vcs_repo is not None:
327 327 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
328 328 c.auth_user.username
329 329 )
330 330 self.path_filter = PathFilter(path_perms)
331 331 except RepositoryRequirementError as e:
332 332 c.repository_requirements_missing = {"error": str(e)}
333 333 self._handle_missing_requirements(e)
334 334 self.rhodecode_vcs_repo = None
335 335
336 336 c.path_filter = self.path_filter # used by atom_feed_entry.mako
337 337
338 338 if self.rhodecode_vcs_repo is None:
339 339 # unable to fetch this repo as vcs instance, report back to user
340 340 log.debug(
341 341 "Repository was not found on filesystem, check if it exists or is not damaged"
342 342 )
343 343 h.flash(
344 344 _(
345 345 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
346 346 "Please check if it exist, or is not damaged."
347 347 )
348 348 % {"repo_name": c.repo_name},
349 349 category="error",
350 350 ignore_duplicate=True,
351 351 )
352 352 if c.repository_requirements_missing:
353 353 route = self.request.matched_route.name
354 354 if route.startswith(("edit_repo", "repo_summary")):
355 355 # allow summary and edit repo on missing requirements
356 356 return c
357 357
358 358 raise HTTPFound(
359 359 h.route_path("repo_summary", repo_name=self.db_repo_name)
360 360 )
361 361
362 362 else: # redirect if we don't show missing requirements
363 363 raise HTTPFound(h.route_path("home"))
364 364
365 365 c.has_origin_repo_read_perm = False
366 366 if self.db_repo.fork:
367 367 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
368 368 "repository.write", "repository.read", "repository.admin"
369 369 )(self.db_repo.fork.repo_name, "summary fork link")
370 370
371 371 return c
372 372
373 373 def _get_f_path_unchecked(self, matchdict, default=None):
374 374 """
375 375 Should only be used by redirects, everything else should call _get_f_path
376 376 """
377 377 f_path = matchdict.get("f_path")
378 378 if f_path:
379 379 # fix for multiple initial slashes that cause errors for GIT
380 380 return f_path.lstrip("/")
381 381
382 382 return default
383 383
384 384 def _get_f_path(self, matchdict, default=None):
385 385 f_path_match = self._get_f_path_unchecked(matchdict, default)
386 386 return self.path_filter.assert_path_permissions(f_path_match)
387 387
388 388 def _get_general_setting(self, target_repo, settings_key, default=False):
389 389 settings_model = VcsSettingsModel(repo=target_repo)
390 390 settings = settings_model.get_general_settings()
391 391 return settings.get(settings_key, default)
392 392
393 393 def _get_repo_setting(self, target_repo, settings_key, default=False):
394 394 settings_model = VcsSettingsModel(repo=target_repo)
395 395 settings = settings_model.get_repo_settings_inherited()
396 396 return settings.get(settings_key, default)
397 397
398 398 def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/"):
399 399 log.debug("Looking for README file at path %s", path)
400 400 if commit_id:
401 401 landing_commit_id = commit_id
402 402 else:
403 403 landing_commit = db_repo.get_landing_commit()
404 404 if isinstance(landing_commit, EmptyCommit):
405 405 return None, None
406 406 landing_commit_id = landing_commit.raw_id
407 407
408 408 cache_namespace_uid = f"repo.{db_repo.repo_id}"
409 409 region = rc_cache.get_or_create_region(
410 410 "cache_repo", cache_namespace_uid, use_async_runner=False
411 411 )
412 412 start = time.time()
413 413
414 414 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
415 415 def generate_repo_readme(
416 416 repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
417 417 ):
418 418 readme_data = None
419 419 readme_filename = None
420 420
421 421 commit = db_repo.get_commit(_commit_id)
422 422 log.debug("Searching for a README file at commit %s.", _commit_id)
423 423 readme_node = ReadmeFinder(_renderer_type).search(
424 424 commit, path=_readme_search_path
425 425 )
426 426
427 427 if readme_node:
428 428 log.debug("Found README node: %s", readme_node)
429 429
430 430 relative_urls = {
431 431 "raw": h.route_path(
432 432 "repo_file_raw",
433 433 repo_name=_repo_name,
434 434 commit_id=commit.raw_id,
435 435 f_path=readme_node.path,
436 436 ),
437 437 "standard": h.route_path(
438 438 "repo_files",
439 439 repo_name=_repo_name,
440 440 commit_id=commit.raw_id,
441 441 f_path=readme_node.path,
442 442 ),
443 443 }
444 444
445 445 readme_data = self._render_readme_or_none(
446 446 commit, readme_node, relative_urls
447 447 )
448 448 readme_filename = readme_node.str_path
449 449
450 450 return readme_data, readme_filename
451 451
452 452 readme_data, readme_filename = generate_repo_readme(
453 453 db_repo.repo_id,
454 454 landing_commit_id,
455 455 db_repo.repo_name,
456 456 path,
457 457 renderer_type,
458 458 )
459 459
460 460 compute_time = time.time() - start
461 461 log.debug(
462 462 "Repo README for path %s generated and computed in %.4fs",
463 463 path,
464 464 compute_time,
465 465 )
466 466 return readme_data, readme_filename
467 467
468 468 def _render_readme_or_none(self, commit, readme_node, relative_urls):
469 469 log.debug("Found README file `%s` rendering...", readme_node.path)
470 470 renderer = MarkupRenderer()
471 471 try:
472 472 html_source = renderer.render(
473 473 readme_node.str_content, filename=readme_node.path
474 474 )
475 475 if relative_urls:
476 476 return relative_links(html_source, relative_urls)
477 477 return html_source
478 478 except Exception:
479 479 log.exception("Exception while trying to render the README")
480 480
481 481 def get_recache_flag(self):
482 482 for flag_name in ["force_recache", "force-recache", "no-cache"]:
483 483 flag_val = self.request.GET.get(flag_name)
484 484 if str2bool(flag_val):
485 485 return True
486 486 return False
487 487
488 488 def get_commit_preload_attrs(cls):
489 489 pre_load = [
490 490 "author",
491 491 "branch",
492 492 "date",
493 493 "message",
494 494 "parents",
495 495 "obsolete",
496 496 "phase",
497 497 "hidden",
498 498 ]
499 499 return pre_load
500 500
501 501
502 502 class PathFilter(object):
503 503 # Expects an instance of BasePathPermissionChecker or None
504 504 def __init__(self, permission_checker):
505 505 self.permission_checker = permission_checker
506 506
507 507 def assert_path_permissions(self, path):
508 508 if self.path_access_allowed(path):
509 509 return path
510 510 raise HTTPForbidden()
511 511
512 512 def path_access_allowed(self, path):
513 513 log.debug("Checking ACL permissions for PathFilter for `%s`", path)
514 514 if self.permission_checker:
515 515 has_access = path and self.permission_checker.has_access(path)
516 516 log.debug(
517 517 "ACL Permissions checker enabled, ACL Check has_access: %s", has_access
518 518 )
519 519 return has_access
520 520
521 521 log.debug("ACL permissions checker not enabled, skipping...")
522 522 return True
523 523
524 524 def filter_patchset(self, patchset):
525 525 if not self.permission_checker or not patchset:
526 526 return patchset, False
527 527 had_filtered = False
528 528 filtered_patchset = []
529 529 for patch in patchset:
530 530 filename = patch.get("filename", None)
531 531 if not filename or self.permission_checker.has_access(filename):
532 532 filtered_patchset.append(patch)
533 533 else:
534 534 had_filtered = True
535 535 if had_filtered:
536 536 if isinstance(patchset, diffs.LimitedDiffContainer):
537 537 filtered_patchset = diffs.LimitedDiffContainer(
538 538 patchset.diff_limit, patchset.cur_diff_size, filtered_patchset
539 539 )
540 540 return filtered_patchset, True
541 541 else:
542 542 return patchset, False
543 543
544 544 def render_patchset_filtered(
545 545 self, diffset, patchset, source_ref=None, target_ref=None
546 546 ):
547 547 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
548 548 result = diffset.render_patchset(
549 549 filtered_patchset, source_ref=source_ref, target_ref=target_ref
550 550 )
551 551 result.has_hidden_changes = has_hidden_changes
552 552 return result
553 553
554 554 def get_raw_patch(self, diff_processor):
555 555 if self.permission_checker is None:
556 556 return diff_processor.as_raw()
557 557 elif self.permission_checker.has_full_access:
558 558 return diff_processor.as_raw()
559 559 else:
560 560 return "# Repository has user-specific filters, raw patch generation is disabled."
561 561
562 562 @property
563 563 def is_enabled(self):
564 564 return self.permission_checker is not None
565 565
566 566
567 567 class RepoGroupAppView(BaseAppView):
568 568 def __init__(self, context, request):
569 569 super().__init__(context, request)
570 570 self.db_repo_group = request.db_repo_group
571 571 self.db_repo_group_name = self.db_repo_group.group_name
572 572
573 573 def _get_local_tmpl_context(self, include_app_defaults=True):
574 574 _ = self.request.translate
575 575 c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
576 576 c.repo_group = self.db_repo_group
577 577 return c
578 578
579 579 def _revoke_perms_on_yourself(self, form_result):
580 580 _updates = [
581 581 u
582 582 for u in form_result["perm_updates"]
583 583 if self._rhodecode_user.user_id == int(u[0])
584 584 ]
585 585 _additions = [
586 586 u
587 587 for u in form_result["perm_additions"]
588 588 if self._rhodecode_user.user_id == int(u[0])
589 589 ]
590 590 _deletions = [
591 591 u
592 592 for u in form_result["perm_deletions"]
593 593 if self._rhodecode_user.user_id == int(u[0])
594 594 ]
595 595 admin_perm = "group.admin"
596 596 if (
597 597 _updates
598 598 and _updates[0][1] != admin_perm
599 599 or _additions
600 600 and _additions[0][1] != admin_perm
601 601 or _deletions
602 602 and _deletions[0][1] != admin_perm
603 603 ):
604 604 return True
605 605 return False
606 606
607 607
608 608 class UserGroupAppView(BaseAppView):
609 609 def __init__(self, context, request):
610 610 super().__init__(context, request)
611 611 self.db_user_group = request.db_user_group
612 612 self.db_user_group_name = self.db_user_group.users_group_name
613 613
614 614
615 615 class UserAppView(BaseAppView):
616 616 def __init__(self, context, request):
617 617 super().__init__(context, request)
618 618 self.db_user = request.db_user
619 619 self.db_user_id = self.db_user.user_id
620 620
621 621 _ = self.request.translate
622 622 if not request.db_user_supports_default:
623 623 if self.db_user.username == User.DEFAULT_USER:
624 624 h.flash(
625 625 _("Editing user `{}` is disabled.".format(User.DEFAULT_USER)),
626 626 category="warning",
627 627 )
628 628 raise HTTPFound(h.route_path("users"))
629 629
630 630
631 631 class DataGridAppView(object):
632 632 """
633 633 Common class to have re-usable grid rendering components
634 634 """
635 635
636 636 def _extract_ordering(self, request, column_map=None):
637 637 column_map = column_map or {}
638 638 column_index = safe_int(request.GET.get("order[0][column]"))
639 639 order_dir = request.GET.get("order[0][dir]", "desc")
640 640 order_by = request.GET.get("columns[%s][data][sort]" % column_index, "name_raw")
641 641
642 642 # translate datatable to DB columns
643 643 order_by = column_map.get(order_by) or order_by
644 644
645 645 search_q = request.GET.get("search[value]")
646 646 return search_q, order_by, order_dir
647 647
648 648 def _extract_chunk(self, request):
649 649 start = safe_int(request.GET.get("start"), 0)
650 650 length = safe_int(request.GET.get("length"), 25)
651 651 draw = safe_int(request.GET.get("draw"))
652 652 return draw, start, length
653 653
654 654 def _get_order_col(self, order_by, model):
655 655 if isinstance(order_by, str):
656 656 try:
657 657 return operator.attrgetter(order_by)(model)
658 658 except AttributeError:
659 659 return None
660 660 else:
661 661 return order_by
662 662
663 663
664 664 class BaseReferencesView(RepoAppView):
665 665 """
666 666 Base for reference view for branches, tags and bookmarks.
667 667 """
668 668
669 669 def load_default_context(self):
670 670 c = self._get_local_tmpl_context()
671 671 return c
672 672
673 673 def load_refs_context(self, ref_items, partials_template):
674 674 _render = self.request.get_partial_renderer(partials_template)
675 675 pre_load = ["author", "date", "message", "parents"]
676 676
677 677 is_svn = h.is_svn(self.rhodecode_vcs_repo)
678 678 is_hg = h.is_hg(self.rhodecode_vcs_repo)
679 679
680 680 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
681 681
682 682 closed_refs = {}
683 683 if is_hg:
684 684 closed_refs = self.rhodecode_vcs_repo.branches_closed
685 685
686 686 data = []
687 687 for ref_name, commit_id in ref_items:
688 688 commit = self.rhodecode_vcs_repo.get_commit(
689 689 commit_id=commit_id, pre_load=pre_load
690 690 )
691 691 closed = ref_name in closed_refs
692 692
693 693 # TODO: johbo: Unify generation of reference links
694 694 use_commit_id = "/" in ref_name or is_svn
695 695
696 696 if use_commit_id:
697 697 files_url = h.route_path(
698 698 "repo_files",
699 699 repo_name=self.db_repo_name,
700 700 f_path=ref_name if is_svn else "",
701 701 commit_id=commit_id,
702 702 _query=dict(at=ref_name),
703 703 )
704 704
705 705 else:
706 706 files_url = h.route_path(
707 707 "repo_files",
708 708 repo_name=self.db_repo_name,
709 709 f_path=ref_name if is_svn else "",
710 710 commit_id=ref_name,
711 711 _query=dict(at=ref_name),
712 712 )
713 713
714 714 data.append(
715 715 {
716 716 "name": _render("name", ref_name, files_url, closed),
717 717 "name_raw": ref_name,
718 "closed": closed,
718 719 "date": _render("date", commit.date),
719 720 "date_raw": datetime_to_time(commit.date),
720 721 "author": _render("author", commit.author),
721 722 "commit": _render(
722 723 "commit", commit.message, commit.raw_id, commit.idx
723 724 ),
724 725 "commit_raw": commit.idx,
725 726 "compare": _render(
726 727 "compare", format_ref_id(ref_name, commit.raw_id)
727 728 ),
728 729 }
729 730 )
730 731
731 732 return data
732 733
733 734
734 735 class RepoRoutePredicate(object):
735 736 def __init__(self, val, config):
736 737 self.val = val
737 738
738 739 def text(self):
739 740 return f"repo_route = {self.val}"
740 741
741 742 phash = text
742 743
743 744 def __call__(self, info, request):
744 745 if hasattr(request, "vcs_call"):
745 746 # skip vcs calls
746 747 return
747 748
748 749 repo_name = info["match"]["repo_name"]
749 750
750 751 repo_name_parts = repo_name.split("/")
751 752 repo_slugs = [x for x in (repo_name_slug(x) for x in repo_name_parts)]
752 753
753 754 if repo_name_parts != repo_slugs:
754 755 # short-skip if the repo-name doesn't follow slug rule
755 756 log.warning(
756 757 "repo_name: %s is different than slug %s", repo_name_parts, repo_slugs
757 758 )
758 759 return False
759 760
760 761 repo_model = repo.RepoModel()
761 762
762 763 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
763 764
764 765 def redirect_if_creating(route_info, db_repo):
765 766 skip_views = ["edit_repo_advanced_delete"]
766 767 route = route_info["route"]
767 768 # we should skip delete view so we can actually "remove" repositories
768 769 # if they get stuck in creating state.
769 770 if route.name in skip_views:
770 771 return
771 772
772 773 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
773 774 repo_creating_url = request.route_path(
774 775 "repo_creating", repo_name=db_repo.repo_name
775 776 )
776 777 raise HTTPFound(repo_creating_url)
777 778
778 779 if by_name_match:
779 780 # register this as request object we can re-use later
780 781 request.db_repo = by_name_match
781 782 request.db_repo_name = request.db_repo.repo_name
782 783
783 784 redirect_if_creating(info, by_name_match)
784 785 return True
785 786
786 787 by_id_match = repo_model.get_repo_by_id(repo_name)
787 788 if by_id_match:
788 789 request.db_repo = by_id_match
789 790 request.db_repo_name = request.db_repo.repo_name
790 791 redirect_if_creating(info, by_id_match)
791 792 return True
792 793
793 794 return False
794 795
795 796
796 797 class RepoForbidArchivedRoutePredicate(object):
797 798 def __init__(self, val, config):
798 799 self.val = val
799 800
800 801 def text(self):
801 802 return f"repo_forbid_archived = {self.val}"
802 803
803 804 phash = text
804 805
805 806 def __call__(self, info, request):
806 807 _ = request.translate
807 808 rhodecode_db_repo = request.db_repo
808 809
809 810 log.debug(
810 811 "%s checking if archived flag for repo for %s",
811 812 self.__class__.__name__,
812 813 rhodecode_db_repo.repo_name,
813 814 )
814 815
815 816 if rhodecode_db_repo.archived:
816 817 log.warning(
817 818 "Current view is not supported for archived repo:%s",
818 819 rhodecode_db_repo.repo_name,
819 820 )
820 821
821 822 h.flash(
822 823 h.literal(_("Action not supported for archived repository.")),
823 824 category="warning",
824 825 )
825 826 summary_url = request.route_path(
826 827 "repo_summary", repo_name=rhodecode_db_repo.repo_name
827 828 )
828 829 raise HTTPFound(summary_url)
829 830 return True
830 831
831 832
832 833 class RepoTypeRoutePredicate(object):
833 834 def __init__(self, val, config):
834 835 self.val = val or ["hg", "git", "svn"]
835 836
836 837 def text(self):
837 838 return f"repo_accepted_type = {self.val}"
838 839
839 840 phash = text
840 841
841 842 def __call__(self, info, request):
842 843 if hasattr(request, "vcs_call"):
843 844 # skip vcs calls
844 845 return
845 846
846 847 rhodecode_db_repo = request.db_repo
847 848
848 849 log.debug(
849 850 "%s checking repo type for %s in %s",
850 851 self.__class__.__name__,
851 852 rhodecode_db_repo.repo_type,
852 853 self.val,
853 854 )
854 855
855 856 if rhodecode_db_repo.repo_type in self.val:
856 857 return True
857 858 else:
858 859 log.warning(
859 860 "Current view is not supported for repo type:%s",
860 861 rhodecode_db_repo.repo_type,
861 862 )
862 863 return False
863 864
864 865
865 866 class RepoGroupRoutePredicate(object):
866 867 def __init__(self, val, config):
867 868 self.val = val
868 869
869 870 def text(self):
870 871 return f"repo_group_route = {self.val}"
871 872
872 873 phash = text
873 874
874 875 def __call__(self, info, request):
875 876 if hasattr(request, "vcs_call"):
876 877 # skip vcs calls
877 878 return
878 879
879 880 repo_group_name = info["match"]["repo_group_name"]
880 881
881 882 repo_group_name_parts = repo_group_name.split("/")
882 883 repo_group_slugs = [
883 884 x for x in [repo_name_slug(x) for x in repo_group_name_parts]
884 885 ]
885 886 if repo_group_name_parts != repo_group_slugs:
886 887 # short-skip if the repo-name doesn't follow slug rule
887 888 log.warning(
888 889 "repo_group_name: %s is different than slug %s",
889 890 repo_group_name_parts,
890 891 repo_group_slugs,
891 892 )
892 893 return False
893 894
894 895 repo_group_model = repo_group.RepoGroupModel()
895 896 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
896 897
897 898 if by_name_match:
898 899 # register this as request object we can re-use later
899 900 request.db_repo_group = by_name_match
900 901 request.db_repo_group_name = request.db_repo_group.group_name
901 902 return True
902 903
903 904 return False
904 905
905 906
906 907 class UserGroupRoutePredicate(object):
907 908 def __init__(self, val, config):
908 909 self.val = val
909 910
910 911 def text(self):
911 912 return f"user_group_route = {self.val}"
912 913
913 914 phash = text
914 915
915 916 def __call__(self, info, request):
916 917 if hasattr(request, "vcs_call"):
917 918 # skip vcs calls
918 919 return
919 920
920 921 user_group_id = info["match"]["user_group_id"]
921 922 user_group_model = user_group.UserGroup()
922 923 by_id_match = user_group_model.get(user_group_id, cache=False)
923 924
924 925 if by_id_match:
925 926 # register this as request object we can re-use later
926 927 request.db_user_group = by_id_match
927 928 return True
928 929
929 930 return False
930 931
931 932
932 933 class UserRoutePredicateBase(object):
933 934 supports_default = None
934 935
935 936 def __init__(self, val, config):
936 937 self.val = val
937 938
938 939 def text(self):
939 940 raise NotImplementedError()
940 941
941 942 def __call__(self, info, request):
942 943 if hasattr(request, "vcs_call"):
943 944 # skip vcs calls
944 945 return
945 946
946 947 user_id = info["match"]["user_id"]
947 948 user_model = user.User()
948 949 by_id_match = user_model.get(user_id, cache=False)
949 950
950 951 if by_id_match:
951 952 # register this as request object we can re-use later
952 953 request.db_user = by_id_match
953 954 request.db_user_supports_default = self.supports_default
954 955 return True
955 956
956 957 return False
957 958
958 959
959 960 class UserRoutePredicate(UserRoutePredicateBase):
960 961 supports_default = False
961 962
962 963 def text(self):
963 964 return f"user_route = {self.val}"
964 965
965 966 phash = text
966 967
967 968
968 969 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
969 970 supports_default = True
970 971
971 972 def text(self):
972 973 return f"user_with_default_route = {self.val}"
973 974
974 975 phash = text
975 976
976 977
977 978 def includeme(config):
978 979 config.add_route_predicate("repo_route", RepoRoutePredicate)
979 980 config.add_route_predicate("repo_accepted_types", RepoTypeRoutePredicate)
980 981 config.add_route_predicate(
981 982 "repo_forbid_when_archived", RepoForbidArchivedRoutePredicate
982 983 )
983 984 config.add_route_predicate("repo_group_route", RepoGroupRoutePredicate)
984 985 config.add_route_predicate("user_group_route", UserGroupRoutePredicate)
985 986 config.add_route_predicate("user_route_with_default", UserRouteWithDefaultPredicate)
986 987 config.add_route_predicate("user_route", UserRoutePredicate)
@@ -1,1235 +1,1244 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 from rhodecode.apps._base import add_route_with_slash
19 19
20 20
21 21 def includeme(config):
22 22 from rhodecode.apps.repository.views.repo_artifacts import RepoArtifactsView
23 23 from rhodecode.apps.repository.views.repo_audit_logs import AuditLogsView
24 24 from rhodecode.apps.repository.views.repo_automation import RepoAutomationView
25 25 from rhodecode.apps.repository.views.repo_bookmarks import RepoBookmarksView
26 26 from rhodecode.apps.repository.views.repo_branch_permissions import RepoSettingsBranchPermissionsView
27 27 from rhodecode.apps.repository.views.repo_branches import RepoBranchesView
28 28 from rhodecode.apps.repository.views.repo_caches import RepoCachesView
29 29 from rhodecode.apps.repository.views.repo_changelog import RepoChangelogView
30 30 from rhodecode.apps.repository.views.repo_checks import RepoChecksView
31 31 from rhodecode.apps.repository.views.repo_commits import RepoCommitsView
32 32 from rhodecode.apps.repository.views.repo_compare import RepoCompareView
33 33 from rhodecode.apps.repository.views.repo_feed import RepoFeedView
34 34 from rhodecode.apps.repository.views.repo_files import RepoFilesView
35 35 from rhodecode.apps.repository.views.repo_forks import RepoForksView
36 36 from rhodecode.apps.repository.views.repo_maintainance import RepoMaintenanceView
37 37 from rhodecode.apps.repository.views.repo_permissions import RepoSettingsPermissionsView
38 38 from rhodecode.apps.repository.views.repo_pull_requests import RepoPullRequestsView
39 39 from rhodecode.apps.repository.views.repo_review_rules import RepoReviewRulesView
40 40 from rhodecode.apps.repository.views.repo_settings import RepoSettingsView
41 41 from rhodecode.apps.repository.views.repo_settings_advanced import RepoSettingsAdvancedView
42 42 from rhodecode.apps.repository.views.repo_settings_fields import RepoSettingsFieldsView
43 43 from rhodecode.apps.repository.views.repo_settings_issue_trackers import RepoSettingsIssueTrackersView
44 44 from rhodecode.apps.repository.views.repo_settings_remote import RepoSettingsRemoteView
45 45 from rhodecode.apps.repository.views.repo_settings_vcs import RepoSettingsVcsView
46 46 from rhodecode.apps.repository.views.repo_strip import RepoStripView
47 47 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
48 48 from rhodecode.apps.repository.views.repo_tags import RepoTagsView
49 49
50 50 # repo creating checks, special cases that aren't repo routes
51 51 config.add_route(
52 52 name='repo_creating',
53 53 pattern='/{repo_name:.*?[^/]}/repo_creating')
54 54 config.add_view(
55 55 RepoChecksView,
56 56 attr='repo_creating',
57 57 route_name='repo_creating', request_method='GET',
58 58 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
59 59
60 60 config.add_route(
61 61 name='repo_creating_check',
62 62 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
63 63 config.add_view(
64 64 RepoChecksView,
65 65 attr='repo_creating_check',
66 66 route_name='repo_creating_check', request_method='GET',
67 67 renderer='json_ext')
68 68
69 69 # Summary
70 70 # NOTE(marcink): one additional route is defined at the very bottom, catch
71 71 # all pattern
72 72 config.add_route(
73 73 name='repo_summary_explicit',
74 74 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
75 75 config.add_view(
76 76 RepoSummaryView,
77 77 attr='summary',
78 78 route_name='repo_summary_explicit', request_method='GET',
79 79 renderer='rhodecode:templates/summary/summary.mako')
80 80
81 81 config.add_route(
82 82 name='repo_summary_commits',
83 83 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
84 84 config.add_view(
85 85 RepoSummaryView,
86 86 attr='summary_commits',
87 87 route_name='repo_summary_commits', request_method='GET',
88 88 renderer='rhodecode:templates/summary/summary_commits.mako')
89 89
90 90 # Commits
91 91 config.add_route(
92 92 name='repo_commit',
93 93 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
94 94 config.add_view(
95 95 RepoCommitsView,
96 96 attr='repo_commit_show',
97 97 route_name='repo_commit', request_method='GET',
98 98 renderer=None)
99 99
100 100 config.add_route(
101 101 name='repo_commit_children',
102 102 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
103 103 config.add_view(
104 104 RepoCommitsView,
105 105 attr='repo_commit_children',
106 106 route_name='repo_commit_children', request_method='GET',
107 107 renderer='json_ext', xhr=True)
108 108
109 109 config.add_route(
110 110 name='repo_commit_parents',
111 111 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
112 112 config.add_view(
113 113 RepoCommitsView,
114 114 attr='repo_commit_parents',
115 115 route_name='repo_commit_parents', request_method='GET',
116 116 renderer='json_ext')
117 117
118 118 config.add_route(
119 119 name='repo_commit_raw',
120 120 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
121 121 config.add_view(
122 122 RepoCommitsView,
123 123 attr='repo_commit_raw',
124 124 route_name='repo_commit_raw', request_method='GET',
125 125 renderer=None)
126 126
127 127 config.add_route(
128 128 name='repo_commit_patch',
129 129 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
130 130 config.add_view(
131 131 RepoCommitsView,
132 132 attr='repo_commit_patch',
133 133 route_name='repo_commit_patch', request_method='GET',
134 134 renderer=None)
135 135
136 136 config.add_route(
137 137 name='repo_commit_download',
138 138 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
139 139 config.add_view(
140 140 RepoCommitsView,
141 141 attr='repo_commit_download',
142 142 route_name='repo_commit_download', request_method='GET',
143 143 renderer=None)
144 144
145 145 config.add_route(
146 146 name='repo_commit_data',
147 147 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
148 148 config.add_view(
149 149 RepoCommitsView,
150 150 attr='repo_commit_data',
151 151 route_name='repo_commit_data', request_method='GET',
152 152 renderer='json_ext', xhr=True)
153 153
154 154 config.add_route(
155 155 name='repo_commit_comment_create',
156 156 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
157 157 config.add_view(
158 158 RepoCommitsView,
159 159 attr='repo_commit_comment_create',
160 160 route_name='repo_commit_comment_create', request_method='POST',
161 161 renderer='json_ext')
162 162
163 163 config.add_route(
164 164 name='repo_commit_comment_preview',
165 165 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
166 166 config.add_view(
167 167 RepoCommitsView,
168 168 attr='repo_commit_comment_preview',
169 169 route_name='repo_commit_comment_preview', request_method='POST',
170 170 renderer='string', xhr=True)
171 171
172 172 config.add_route(
173 173 name='repo_commit_comment_history_view',
174 174 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/history_view/{comment_history_id}', repo_route=True)
175 175 config.add_view(
176 176 RepoCommitsView,
177 177 attr='repo_commit_comment_history_view',
178 178 route_name='repo_commit_comment_history_view', request_method='POST',
179 179 renderer='string', xhr=True)
180 180
181 181 config.add_route(
182 182 name='repo_commit_comment_attachment_upload',
183 183 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
184 184 config.add_view(
185 185 RepoCommitsView,
186 186 attr='repo_commit_comment_attachment_upload',
187 187 route_name='repo_commit_comment_attachment_upload', request_method='POST',
188 188 renderer='json_ext', xhr=True)
189 189
190 190 config.add_route(
191 191 name='repo_commit_comment_delete',
192 192 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
193 193 config.add_view(
194 194 RepoCommitsView,
195 195 attr='repo_commit_comment_delete',
196 196 route_name='repo_commit_comment_delete', request_method='POST',
197 197 renderer='json_ext')
198 198
199 199 config.add_route(
200 200 name='repo_commit_comment_edit',
201 201 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
202 202 config.add_view(
203 203 RepoCommitsView,
204 204 attr='repo_commit_comment_edit',
205 205 route_name='repo_commit_comment_edit', request_method='POST',
206 206 renderer='json_ext')
207 207
208 208 # still working url for backward compat.
209 209 config.add_route(
210 210 name='repo_commit_raw_deprecated',
211 211 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
212 212 config.add_view(
213 213 RepoCommitsView,
214 214 attr='repo_commit_raw',
215 215 route_name='repo_commit_raw_deprecated', request_method='GET',
216 216 renderer=None)
217 217
218 218 # Files
219 219 config.add_route(
220 220 name='repo_archivefile',
221 221 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
222 222 config.add_view(
223 223 RepoFilesView,
224 224 attr='repo_archivefile',
225 225 route_name='repo_archivefile', request_method='GET',
226 226 renderer=None)
227 227
228 228 config.add_route(
229 229 name='repo_files_diff',
230 230 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
231 231 config.add_view(
232 232 RepoFilesView,
233 233 attr='repo_files_diff',
234 234 route_name='repo_files_diff', request_method='GET',
235 235 renderer=None)
236 236
237 237 config.add_route( # legacy route to make old links work
238 238 name='repo_files_diff_2way_redirect',
239 239 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
240 240 config.add_view(
241 241 RepoFilesView,
242 242 attr='repo_files_diff_2way_redirect',
243 243 route_name='repo_files_diff_2way_redirect', request_method='GET',
244 244 renderer=None)
245 245
246 246 config.add_route(
247 247 name='repo_files',
248 248 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
249 249 config.add_view(
250 250 RepoFilesView,
251 251 attr='repo_files',
252 252 route_name='repo_files', request_method='GET',
253 253 renderer=None)
254 254
255 255 config.add_route(
256 256 name='repo_files:default_path',
257 257 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
258 258 config.add_view(
259 259 RepoFilesView,
260 260 attr='repo_files',
261 261 route_name='repo_files:default_path', request_method='GET',
262 262 renderer=None)
263 263
264 264 config.add_route(
265 265 name='repo_files:default_commit',
266 266 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
267 267 config.add_view(
268 268 RepoFilesView,
269 269 attr='repo_files',
270 270 route_name='repo_files:default_commit', request_method='GET',
271 271 renderer=None)
272 272
273 273 config.add_route(
274 274 name='repo_files:rendered',
275 275 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
276 276 config.add_view(
277 277 RepoFilesView,
278 278 attr='repo_files',
279 279 route_name='repo_files:rendered', request_method='GET',
280 280 renderer=None)
281 281
282 282 config.add_route(
283 283 name='repo_files:annotated',
284 284 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
285 285 config.add_view(
286 286 RepoFilesView,
287 287 attr='repo_files',
288 288 route_name='repo_files:annotated', request_method='GET',
289 289 renderer=None)
290 290
291 291 config.add_route(
292 292 name='repo_files:annotated_previous',
293 293 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
294 294 config.add_view(
295 295 RepoFilesView,
296 296 attr='repo_files_annotated_previous',
297 297 route_name='repo_files:annotated_previous', request_method='GET',
298 298 renderer=None)
299 299
300 300 config.add_route(
301 301 name='repo_nodetree_full',
302 302 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
303 303 config.add_view(
304 304 RepoFilesView,
305 305 attr='repo_nodetree_full',
306 306 route_name='repo_nodetree_full', request_method='GET',
307 307 renderer=None, xhr=True)
308 308
309 309 config.add_route(
310 310 name='repo_nodetree_full:default_path',
311 311 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
312 312 config.add_view(
313 313 RepoFilesView,
314 314 attr='repo_nodetree_full',
315 315 route_name='repo_nodetree_full:default_path', request_method='GET',
316 316 renderer=None, xhr=True)
317 317
318 318 config.add_route(
319 319 name='repo_files_nodelist',
320 320 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
321 321 config.add_view(
322 322 RepoFilesView,
323 323 attr='repo_nodelist',
324 324 route_name='repo_files_nodelist', request_method='GET',
325 325 renderer='json_ext', xhr=True)
326 326
327 327 config.add_route(
328 328 name='repo_file_raw',
329 329 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
330 330 config.add_view(
331 331 RepoFilesView,
332 332 attr='repo_file_raw',
333 333 route_name='repo_file_raw', request_method='GET',
334 334 renderer=None)
335 335
336 336 config.add_route(
337 337 name='repo_file_download',
338 338 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
339 339 config.add_view(
340 340 RepoFilesView,
341 341 attr='repo_file_download',
342 342 route_name='repo_file_download', request_method='GET',
343 343 renderer=None)
344 344
345 345 config.add_route( # backward compat to keep old links working
346 346 name='repo_file_download:legacy',
347 347 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
348 348 repo_route=True)
349 349 config.add_view(
350 350 RepoFilesView,
351 351 attr='repo_file_download',
352 352 route_name='repo_file_download:legacy', request_method='GET',
353 353 renderer=None)
354 354
355 355 config.add_route(
356 356 name='repo_file_history',
357 357 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
358 358 config.add_view(
359 359 RepoFilesView,
360 360 attr='repo_file_history',
361 361 route_name='repo_file_history', request_method='GET',
362 362 renderer='json_ext')
363 363
364 364 config.add_route(
365 365 name='repo_file_authors',
366 366 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
367 367 config.add_view(
368 368 RepoFilesView,
369 369 attr='repo_file_authors',
370 370 route_name='repo_file_authors', request_method='GET',
371 371 renderer='rhodecode:templates/files/file_authors_box.mako')
372 372
373 373 config.add_route(
374 374 name='repo_files_check_head',
375 375 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
376 376 repo_route=True)
377 377 config.add_view(
378 378 RepoFilesView,
379 379 attr='repo_files_check_head',
380 380 route_name='repo_files_check_head', request_method='POST',
381 381 renderer='json_ext', xhr=True)
382 382
383 383 config.add_route(
384 384 name='repo_files_remove_file',
385 385 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
386 386 repo_route=True)
387 387 config.add_view(
388 388 RepoFilesView,
389 389 attr='repo_files_remove_file',
390 390 route_name='repo_files_remove_file', request_method='GET',
391 391 renderer='rhodecode:templates/files/files_delete.mako')
392 392
393 393 config.add_route(
394 394 name='repo_files_delete_file',
395 395 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
396 396 repo_route=True)
397 397 config.add_view(
398 398 RepoFilesView,
399 399 attr='repo_files_delete_file',
400 400 route_name='repo_files_delete_file', request_method='POST',
401 401 renderer=None)
402 402
403 403 config.add_route(
404 404 name='repo_files_edit_file',
405 405 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
406 406 repo_route=True)
407 407 config.add_view(
408 408 RepoFilesView,
409 409 attr='repo_files_edit_file',
410 410 route_name='repo_files_edit_file', request_method='GET',
411 411 renderer='rhodecode:templates/files/files_edit.mako')
412 412
413 413 config.add_route(
414 414 name='repo_files_update_file',
415 415 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
416 416 repo_route=True)
417 417 config.add_view(
418 418 RepoFilesView,
419 419 attr='repo_files_update_file',
420 420 route_name='repo_files_update_file', request_method='POST',
421 421 renderer=None)
422 422
423 423 config.add_route(
424 424 name='repo_files_add_file',
425 425 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
426 426 repo_route=True)
427 427 config.add_view(
428 428 RepoFilesView,
429 429 attr='repo_files_add_file',
430 430 route_name='repo_files_add_file', request_method='GET',
431 431 renderer='rhodecode:templates/files/files_add.mako')
432 432
433 433 config.add_route(
434 434 name='repo_files_upload_file',
435 435 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
436 436 repo_route=True)
437 437 config.add_view(
438 438 RepoFilesView,
439 439 attr='repo_files_add_file',
440 440 route_name='repo_files_upload_file', request_method='GET',
441 441 renderer='rhodecode:templates/files/files_upload.mako')
442 442 config.add_view( # POST creates
443 443 RepoFilesView,
444 444 attr='repo_files_upload_file',
445 445 route_name='repo_files_upload_file', request_method='POST',
446 446 renderer='json_ext')
447 447
448 448 config.add_route(
449 449 name='repo_files_replace_binary',
450 450 pattern='/{repo_name:.*?[^/]}/replace_binary/{commit_id}/{f_path:.*}',
451 451 repo_route=True)
452 452 config.add_view(
453 453 RepoFilesView,
454 454 attr='repo_files_replace_file',
455 455 route_name='repo_files_replace_binary', request_method='POST',
456 456 renderer='json_ext')
457 457
458 458 config.add_route(
459 459 name='repo_files_create_file',
460 460 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
461 461 repo_route=True)
462 462 config.add_view( # POST creates
463 463 RepoFilesView,
464 464 attr='repo_files_create_file',
465 465 route_name='repo_files_create_file', request_method='POST',
466 466 renderer=None)
467 467
468 468 # Refs data
469 469 config.add_route(
470 470 name='repo_refs_data',
471 471 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
472 472 config.add_view(
473 473 RepoSummaryView,
474 474 attr='repo_refs_data',
475 475 route_name='repo_refs_data', request_method='GET',
476 476 renderer='json_ext')
477 477
478 478 config.add_route(
479 479 name='repo_refs_changelog_data',
480 480 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
481 481 config.add_view(
482 482 RepoSummaryView,
483 483 attr='repo_refs_changelog_data',
484 484 route_name='repo_refs_changelog_data', request_method='GET',
485 485 renderer='json_ext')
486 486
487 487 config.add_route(
488 488 name='repo_stats',
489 489 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
490 490 config.add_view(
491 491 RepoSummaryView,
492 492 attr='repo_stats',
493 493 route_name='repo_stats', request_method='GET',
494 494 renderer='json_ext')
495 495
496 496 # Commits
497 497 config.add_route(
498 498 name='repo_commits',
499 499 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
500 500 config.add_view(
501 501 RepoChangelogView,
502 502 attr='repo_changelog',
503 503 route_name='repo_commits', request_method='GET',
504 504 renderer='rhodecode:templates/commits/changelog.mako')
505 505 # old routes for backward compat
506 506 config.add_view(
507 507 RepoChangelogView,
508 508 attr='repo_changelog',
509 509 route_name='repo_changelog', request_method='GET',
510 510 renderer='rhodecode:templates/commits/changelog.mako')
511 511
512 512 config.add_route(
513 513 name='repo_commits_elements',
514 514 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
515 515 config.add_view(
516 516 RepoChangelogView,
517 517 attr='repo_commits_elements',
518 518 route_name='repo_commits_elements', request_method=('GET', 'POST'),
519 519 renderer='rhodecode:templates/commits/changelog_elements.mako',
520 520 xhr=True)
521 521
522 522 config.add_route(
523 523 name='repo_commits_elements_file',
524 524 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
525 525 config.add_view(
526 526 RepoChangelogView,
527 527 attr='repo_commits_elements',
528 528 route_name='repo_commits_elements_file', request_method=('GET', 'POST'),
529 529 renderer='rhodecode:templates/commits/changelog_elements.mako',
530 530 xhr=True)
531 531
532 532 config.add_route(
533 533 name='repo_commits_file',
534 534 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
535 535 config.add_view(
536 536 RepoChangelogView,
537 537 attr='repo_changelog',
538 538 route_name='repo_commits_file', request_method='GET',
539 539 renderer='rhodecode:templates/commits/changelog.mako')
540 540 # old routes for backward compat
541 541 config.add_view(
542 542 RepoChangelogView,
543 543 attr='repo_changelog',
544 544 route_name='repo_changelog_file', request_method='GET',
545 545 renderer='rhodecode:templates/commits/changelog.mako')
546 546
547 547 # Changelog (old deprecated name for commits page)
548 548 config.add_route(
549 549 name='repo_changelog',
550 550 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
551 551 config.add_route(
552 552 name='repo_changelog_file',
553 553 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
554 554
555 555 # Compare
556 556 config.add_route(
557 557 name='repo_compare_select',
558 558 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
559 559 config.add_view(
560 560 RepoCompareView,
561 561 attr='compare_select',
562 562 route_name='repo_compare_select', request_method='GET',
563 563 renderer='rhodecode:templates/compare/compare_diff.mako')
564 564
565 565 config.add_route(
566 566 name='repo_compare',
567 567 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
568 568 config.add_view(
569 569 RepoCompareView,
570 570 attr='compare',
571 571 route_name='repo_compare', request_method='GET',
572 572 renderer=None)
573 573
574 574 # Tags
575 575 config.add_route(
576 576 name='tags_home',
577 577 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
578 578 config.add_view(
579 579 RepoTagsView,
580 580 attr='tags',
581 581 route_name='tags_home', request_method='GET',
582 582 renderer='rhodecode:templates/tags/tags.mako')
583 583
584 584 # Branches
585 585 config.add_route(
586 586 name='branches_home',
587 587 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
588 588 config.add_view(
589 589 RepoBranchesView,
590 590 attr='branches',
591 591 route_name='branches_home', request_method='GET',
592 592 renderer='rhodecode:templates/branches/branches.mako')
593 593
594 config.add_route(
595 name='branch_remove',
596 pattern='/{repo_name:.*?[^/]}/{branch_name:.*?[^/]}/remove', repo_route=True, repo_accepted_types=['hg', 'git'])
597 config.add_view(
598 RepoBranchesView,
599 attr='remove_branch',
600 route_name='branch_remove', request_method='POST'
601 )
602
594 603 # Bookmarks
595 604 config.add_route(
596 605 name='bookmarks_home',
597 606 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
598 607 config.add_view(
599 608 RepoBookmarksView,
600 609 attr='bookmarks',
601 610 route_name='bookmarks_home', request_method='GET',
602 611 renderer='rhodecode:templates/bookmarks/bookmarks.mako')
603 612
604 613 # Forks
605 614 config.add_route(
606 615 name='repo_fork_new',
607 616 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
608 617 repo_forbid_when_archived=True,
609 618 repo_accepted_types=['hg', 'git'])
610 619 config.add_view(
611 620 RepoForksView,
612 621 attr='repo_fork_new',
613 622 route_name='repo_fork_new', request_method='GET',
614 623 renderer='rhodecode:templates/forks/forks.mako')
615 624
616 625 config.add_route(
617 626 name='repo_fork_create',
618 627 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
619 628 repo_forbid_when_archived=True,
620 629 repo_accepted_types=['hg', 'git'])
621 630 config.add_view(
622 631 RepoForksView,
623 632 attr='repo_fork_create',
624 633 route_name='repo_fork_create', request_method='POST',
625 634 renderer='rhodecode:templates/forks/fork.mako')
626 635
627 636 config.add_route(
628 637 name='repo_forks_show_all',
629 638 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
630 639 repo_accepted_types=['hg', 'git'])
631 640 config.add_view(
632 641 RepoForksView,
633 642 attr='repo_forks_show_all',
634 643 route_name='repo_forks_show_all', request_method='GET',
635 644 renderer='rhodecode:templates/forks/forks.mako')
636 645
637 646 config.add_route(
638 647 name='repo_forks_data',
639 648 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
640 649 repo_accepted_types=['hg', 'git'])
641 650 config.add_view(
642 651 RepoForksView,
643 652 attr='repo_forks_data',
644 653 route_name='repo_forks_data', request_method='GET',
645 654 renderer='json_ext', xhr=True)
646 655
647 656 # Pull Requests
648 657 config.add_route(
649 658 name='pullrequest_show',
650 659 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
651 660 repo_route=True)
652 661 config.add_view(
653 662 RepoPullRequestsView,
654 663 attr='pull_request_show',
655 664 route_name='pullrequest_show', request_method='GET',
656 665 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
657 666
658 667 config.add_route(
659 668 name='pullrequest_show_all',
660 669 pattern='/{repo_name:.*?[^/]}/pull-request',
661 670 repo_route=True, repo_accepted_types=['hg', 'git'])
662 671 config.add_view(
663 672 RepoPullRequestsView,
664 673 attr='pull_request_list',
665 674 route_name='pullrequest_show_all', request_method='GET',
666 675 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
667 676
668 677 config.add_route(
669 678 name='pullrequest_show_all_data',
670 679 pattern='/{repo_name:.*?[^/]}/pull-request-data',
671 680 repo_route=True, repo_accepted_types=['hg', 'git'])
672 681 config.add_view(
673 682 RepoPullRequestsView,
674 683 attr='pull_request_list_data',
675 684 route_name='pullrequest_show_all_data', request_method='GET',
676 685 renderer='json_ext', xhr=True)
677 686
678 687 config.add_route(
679 688 name='pullrequest_repo_refs',
680 689 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
681 690 repo_route=True)
682 691 config.add_view(
683 692 RepoPullRequestsView,
684 693 attr='pull_request_repo_refs',
685 694 route_name='pullrequest_repo_refs', request_method='GET',
686 695 renderer='json_ext', xhr=True)
687 696
688 697 config.add_route(
689 698 name='pullrequest_repo_targets',
690 699 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
691 700 repo_route=True)
692 701 config.add_view(
693 702 RepoPullRequestsView,
694 703 attr='pullrequest_repo_targets',
695 704 route_name='pullrequest_repo_targets', request_method='GET',
696 705 renderer='json_ext', xhr=True)
697 706
698 707 config.add_route(
699 708 name='pullrequest_new',
700 709 pattern='/{repo_name:.*?[^/]}/pull-request/new',
701 710 repo_route=True, repo_accepted_types=['hg', 'git'],
702 711 repo_forbid_when_archived=True)
703 712 config.add_view(
704 713 RepoPullRequestsView,
705 714 attr='pull_request_new',
706 715 route_name='pullrequest_new', request_method='GET',
707 716 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
708 717
709 718 config.add_route(
710 719 name='pullrequest_create',
711 720 pattern='/{repo_name:.*?[^/]}/pull-request/create',
712 721 repo_route=True, repo_accepted_types=['hg', 'git'],
713 722 repo_forbid_when_archived=True)
714 723 config.add_view(
715 724 RepoPullRequestsView,
716 725 attr='pull_request_create',
717 726 route_name='pullrequest_create', request_method='POST',
718 727 renderer=None)
719 728
720 729 config.add_route(
721 730 name='pullrequest_update',
722 731 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
723 732 repo_route=True, repo_forbid_when_archived=True)
724 733 config.add_view(
725 734 RepoPullRequestsView,
726 735 attr='pull_request_update',
727 736 route_name='pullrequest_update', request_method='POST',
728 737 renderer='json_ext')
729 738
730 739 config.add_route(
731 740 name='pullrequest_merge',
732 741 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
733 742 repo_route=True, repo_forbid_when_archived=True)
734 743 config.add_view(
735 744 RepoPullRequestsView,
736 745 attr='pull_request_merge',
737 746 route_name='pullrequest_merge', request_method='POST',
738 747 renderer='json_ext')
739 748
740 749 config.add_route(
741 750 name='pullrequest_delete',
742 751 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
743 752 repo_route=True, repo_forbid_when_archived=True)
744 753 config.add_view(
745 754 RepoPullRequestsView,
746 755 attr='pull_request_delete',
747 756 route_name='pullrequest_delete', request_method='POST',
748 757 renderer='json_ext')
749 758
750 759 config.add_route(
751 760 name='pullrequest_comment_create',
752 761 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
753 762 repo_route=True)
754 763 config.add_view(
755 764 RepoPullRequestsView,
756 765 attr='pull_request_comment_create',
757 766 route_name='pullrequest_comment_create', request_method='POST',
758 767 renderer='json_ext')
759 768
760 769 config.add_route(
761 770 name='pullrequest_comment_edit',
762 771 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
763 772 repo_route=True, repo_accepted_types=['hg', 'git'])
764 773 config.add_view(
765 774 RepoPullRequestsView,
766 775 attr='pull_request_comment_edit',
767 776 route_name='pullrequest_comment_edit', request_method='POST',
768 777 renderer='json_ext')
769 778
770 779 config.add_route(
771 780 name='pullrequest_comment_delete',
772 781 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
773 782 repo_route=True, repo_accepted_types=['hg', 'git'])
774 783 config.add_view(
775 784 RepoPullRequestsView,
776 785 attr='pull_request_comment_delete',
777 786 route_name='pullrequest_comment_delete', request_method='POST',
778 787 renderer='json_ext')
779 788
780 789 config.add_route(
781 790 name='pullrequest_comments',
782 791 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
783 792 repo_route=True)
784 793 config.add_view(
785 794 RepoPullRequestsView,
786 795 attr='pullrequest_comments',
787 796 route_name='pullrequest_comments', request_method='POST',
788 797 renderer='string_html', xhr=True)
789 798
790 799 config.add_route(
791 800 name='pullrequest_todos',
792 801 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
793 802 repo_route=True)
794 803 config.add_view(
795 804 RepoPullRequestsView,
796 805 attr='pullrequest_todos',
797 806 route_name='pullrequest_todos', request_method='POST',
798 807 renderer='string_html', xhr=True)
799 808
800 809 config.add_route(
801 810 name='pullrequest_drafts',
802 811 pattern=r'/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/drafts',
803 812 repo_route=True)
804 813 config.add_view(
805 814 RepoPullRequestsView,
806 815 attr='pullrequest_drafts',
807 816 route_name='pullrequest_drafts', request_method='POST',
808 817 renderer='string_html', xhr=True)
809 818
810 819 # Artifacts, (EE feature)
811 820 config.add_route(
812 821 name='repo_artifacts_list',
813 822 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
814 823 config.add_view(
815 824 RepoArtifactsView,
816 825 attr='repo_artifacts',
817 826 route_name='repo_artifacts_list', request_method='GET',
818 827 renderer='rhodecode:templates/artifacts/artifact_list.mako')
819 828
820 829 # Settings
821 830 config.add_route(
822 831 name='edit_repo',
823 832 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
824 833 config.add_view(
825 834 RepoSettingsView,
826 835 attr='edit_settings',
827 836 route_name='edit_repo', request_method='GET',
828 837 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
829 838 # update is POST on edit_repo
830 839 config.add_view(
831 840 RepoSettingsView,
832 841 attr='edit_settings_update',
833 842 route_name='edit_repo', request_method='POST',
834 843 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
835 844
836 845 # Settings advanced
837 846 config.add_route(
838 847 name='edit_repo_advanced',
839 848 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
840 849 config.add_view(
841 850 RepoSettingsAdvancedView,
842 851 attr='edit_advanced',
843 852 route_name='edit_repo_advanced', request_method='GET',
844 853 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
845 854
846 855 config.add_route(
847 856 name='edit_repo_advanced_archive',
848 857 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
849 858 config.add_view(
850 859 RepoSettingsAdvancedView,
851 860 attr='edit_advanced_archive',
852 861 route_name='edit_repo_advanced_archive', request_method='POST',
853 862 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
854 863
855 864 config.add_route(
856 865 name='edit_repo_advanced_delete',
857 866 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
858 867 config.add_view(
859 868 RepoSettingsAdvancedView,
860 869 attr='edit_advanced_delete',
861 870 route_name='edit_repo_advanced_delete', request_method='POST',
862 871 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
863 872
864 873 config.add_route(
865 874 name='edit_repo_advanced_locking',
866 875 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
867 876 config.add_view(
868 877 RepoSettingsAdvancedView,
869 878 attr='edit_advanced_toggle_locking',
870 879 route_name='edit_repo_advanced_locking', request_method='POST',
871 880 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
872 881
873 882 config.add_route(
874 883 name='edit_repo_advanced_journal',
875 884 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
876 885 config.add_view(
877 886 RepoSettingsAdvancedView,
878 887 attr='edit_advanced_journal',
879 888 route_name='edit_repo_advanced_journal', request_method='POST',
880 889 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
881 890
882 891 config.add_route(
883 892 name='edit_repo_advanced_fork',
884 893 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
885 894 config.add_view(
886 895 RepoSettingsAdvancedView,
887 896 attr='edit_advanced_fork',
888 897 route_name='edit_repo_advanced_fork', request_method='POST',
889 898 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
890 899
891 900 config.add_route(
892 901 name='edit_repo_advanced_hooks',
893 902 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
894 903 config.add_view(
895 904 RepoSettingsAdvancedView,
896 905 attr='edit_advanced_install_hooks',
897 906 route_name='edit_repo_advanced_hooks', request_method='GET',
898 907 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
899 908
900 909 # Caches
901 910 config.add_route(
902 911 name='edit_repo_caches',
903 912 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
904 913 config.add_view(
905 914 RepoCachesView,
906 915 attr='repo_caches',
907 916 route_name='edit_repo_caches', request_method='GET',
908 917 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
909 918 config.add_view(
910 919 RepoCachesView,
911 920 attr='repo_caches_purge',
912 921 route_name='edit_repo_caches', request_method='POST')
913 922
914 923 # Permissions
915 924 config.add_route(
916 925 name='edit_repo_perms',
917 926 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
918 927 config.add_view(
919 928 RepoSettingsPermissionsView,
920 929 attr='edit_permissions',
921 930 route_name='edit_repo_perms', request_method='GET',
922 931 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
923 932 config.add_view(
924 933 RepoSettingsPermissionsView,
925 934 attr='edit_permissions_update',
926 935 route_name='edit_repo_perms', request_method='POST',
927 936 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
928 937
929 938 config.add_route(
930 939 name='edit_repo_perms_set_private',
931 940 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
932 941 config.add_view(
933 942 RepoSettingsPermissionsView,
934 943 attr='edit_permissions_set_private_repo',
935 944 route_name='edit_repo_perms_set_private', request_method='POST',
936 945 renderer='json_ext')
937 946
938 947 # Permissions Branch (EE feature)
939 948 config.add_route(
940 949 name='edit_repo_perms_branch',
941 950 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
942 951 config.add_view(
943 952 RepoSettingsBranchPermissionsView,
944 953 attr='branch_permissions',
945 954 route_name='edit_repo_perms_branch', request_method='GET',
946 955 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
947 956
948 957 config.add_route(
949 958 name='edit_repo_perms_branch_delete',
950 959 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
951 960 repo_route=True)
952 961 ## Only implemented in EE
953 962
954 963 # Maintenance
955 964 config.add_route(
956 965 name='edit_repo_maintenance',
957 966 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
958 967 config.add_view(
959 968 RepoMaintenanceView,
960 969 attr='repo_maintenance',
961 970 route_name='edit_repo_maintenance', request_method='GET',
962 971 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
963 972
964 973 config.add_route(
965 974 name='edit_repo_maintenance_execute',
966 975 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
967 976 config.add_view(
968 977 RepoMaintenanceView,
969 978 attr='repo_maintenance_execute',
970 979 route_name='edit_repo_maintenance_execute', request_method='GET',
971 980 renderer='json', xhr=True)
972 981
973 982 # Fields
974 983 config.add_route(
975 984 name='edit_repo_fields',
976 985 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
977 986 config.add_view(
978 987 RepoSettingsFieldsView,
979 988 attr='repo_field_edit',
980 989 route_name='edit_repo_fields', request_method='GET',
981 990 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
982 991
983 992 config.add_route(
984 993 name='edit_repo_fields_create',
985 994 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
986 995 config.add_view(
987 996 RepoSettingsFieldsView,
988 997 attr='repo_field_create',
989 998 route_name='edit_repo_fields_create', request_method='POST',
990 999 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
991 1000
992 1001 config.add_route(
993 1002 name='edit_repo_fields_delete',
994 1003 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
995 1004 config.add_view(
996 1005 RepoSettingsFieldsView,
997 1006 attr='repo_field_delete',
998 1007 route_name='edit_repo_fields_delete', request_method='POST',
999 1008 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1000 1009
1001 1010 # quick actions: locking
1002 1011 config.add_route(
1003 1012 name='repo_settings_quick_actions',
1004 1013 pattern='/{repo_name:.*?[^/]}/settings/quick-action', repo_route=True)
1005 1014 config.add_view(
1006 1015 RepoSettingsView,
1007 1016 attr='repo_settings_quick_actions',
1008 1017 route_name='repo_settings_quick_actions', request_method='GET',
1009 1018 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1010 1019
1011 1020 # Remote
1012 1021 config.add_route(
1013 1022 name='edit_repo_remote',
1014 1023 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
1015 1024 config.add_view(
1016 1025 RepoSettingsRemoteView,
1017 1026 attr='repo_remote_edit_form',
1018 1027 route_name='edit_repo_remote', request_method='GET',
1019 1028 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1020 1029
1021 1030 config.add_route(
1022 1031 name='edit_repo_remote_pull',
1023 1032 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
1024 1033 config.add_view(
1025 1034 RepoSettingsRemoteView,
1026 1035 attr='repo_remote_pull_changes',
1027 1036 route_name='edit_repo_remote_pull', request_method='POST',
1028 1037 renderer=None)
1029 1038
1030 1039 config.add_route(
1031 1040 name='edit_repo_remote_push',
1032 1041 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
1033 1042
1034 1043 # Statistics
1035 1044 config.add_route(
1036 1045 name='edit_repo_statistics',
1037 1046 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
1038 1047 config.add_view(
1039 1048 RepoSettingsView,
1040 1049 attr='edit_statistics_form',
1041 1050 route_name='edit_repo_statistics', request_method='GET',
1042 1051 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1043 1052
1044 1053 config.add_route(
1045 1054 name='edit_repo_statistics_reset',
1046 1055 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
1047 1056 config.add_view(
1048 1057 RepoSettingsView,
1049 1058 attr='repo_statistics_reset',
1050 1059 route_name='edit_repo_statistics_reset', request_method='POST',
1051 1060 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1052 1061
1053 1062 # Issue trackers
1054 1063 config.add_route(
1055 1064 name='edit_repo_issuetracker',
1056 1065 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
1057 1066 config.add_view(
1058 1067 RepoSettingsIssueTrackersView,
1059 1068 attr='repo_issuetracker',
1060 1069 route_name='edit_repo_issuetracker', request_method='GET',
1061 1070 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1062 1071
1063 1072 config.add_route(
1064 1073 name='edit_repo_issuetracker_test',
1065 1074 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
1066 1075 config.add_view(
1067 1076 RepoSettingsIssueTrackersView,
1068 1077 attr='repo_issuetracker_test',
1069 1078 route_name='edit_repo_issuetracker_test', request_method='POST',
1070 1079 renderer='string', xhr=True)
1071 1080
1072 1081 config.add_route(
1073 1082 name='edit_repo_issuetracker_delete',
1074 1083 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
1075 1084 config.add_view(
1076 1085 RepoSettingsIssueTrackersView,
1077 1086 attr='repo_issuetracker_delete',
1078 1087 route_name='edit_repo_issuetracker_delete', request_method='POST',
1079 1088 renderer='json_ext', xhr=True)
1080 1089
1081 1090 config.add_route(
1082 1091 name='edit_repo_issuetracker_update',
1083 1092 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
1084 1093 config.add_view(
1085 1094 RepoSettingsIssueTrackersView,
1086 1095 attr='repo_issuetracker_update',
1087 1096 route_name='edit_repo_issuetracker_update', request_method='POST',
1088 1097 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1089 1098
1090 1099 # VCS Settings
1091 1100 config.add_route(
1092 1101 name='edit_repo_vcs',
1093 1102 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
1094 1103 config.add_view(
1095 1104 RepoSettingsVcsView,
1096 1105 attr='repo_vcs_settings',
1097 1106 route_name='edit_repo_vcs', request_method='GET',
1098 1107 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1099 1108
1100 1109 config.add_route(
1101 1110 name='edit_repo_vcs_update',
1102 1111 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
1103 1112 config.add_view(
1104 1113 RepoSettingsVcsView,
1105 1114 attr='repo_settings_vcs_update',
1106 1115 route_name='edit_repo_vcs_update', request_method='POST',
1107 1116 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1108 1117
1109 1118 # svn pattern
1110 1119 config.add_route(
1111 1120 name='edit_repo_vcs_svn_pattern_delete',
1112 1121 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
1113 1122 config.add_view(
1114 1123 RepoSettingsVcsView,
1115 1124 attr='repo_settings_delete_svn_pattern',
1116 1125 route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST',
1117 1126 renderer='json_ext', xhr=True)
1118 1127
1119 1128 # Repo Review Rules (EE feature)
1120 1129 config.add_route(
1121 1130 name='repo_reviewers',
1122 1131 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
1123 1132 config.add_view(
1124 1133 RepoReviewRulesView,
1125 1134 attr='repo_review_rules',
1126 1135 route_name='repo_reviewers', request_method='GET',
1127 1136 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1128 1137
1129 1138 config.add_route(
1130 1139 name='repo_default_reviewers_data',
1131 1140 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
1132 1141 config.add_view(
1133 1142 RepoReviewRulesView,
1134 1143 attr='repo_default_reviewers_data',
1135 1144 route_name='repo_default_reviewers_data', request_method='GET',
1136 1145 renderer='json_ext')
1137 1146
1138 1147 # Repo Automation (EE feature)
1139 1148 config.add_route(
1140 1149 name='repo_automation',
1141 1150 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
1142 1151 config.add_view(
1143 1152 RepoAutomationView,
1144 1153 attr='repo_automation',
1145 1154 route_name='repo_automation', request_method='GET',
1146 1155 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1147 1156
1148 1157 # Strip
1149 1158 config.add_route(
1150 1159 name='edit_repo_strip',
1151 1160 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
1152 1161 config.add_view(
1153 1162 RepoStripView,
1154 1163 attr='strip',
1155 1164 route_name='edit_repo_strip', request_method='GET',
1156 1165 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1157 1166
1158 1167 config.add_route(
1159 1168 name='strip_check',
1160 1169 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
1161 1170 config.add_view(
1162 1171 RepoStripView,
1163 1172 attr='strip_check',
1164 1173 route_name='strip_check', request_method='POST',
1165 1174 renderer='json', xhr=True)
1166 1175
1167 1176 config.add_route(
1168 1177 name='strip_execute',
1169 1178 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
1170 1179 config.add_view(
1171 1180 RepoStripView,
1172 1181 attr='strip_execute',
1173 1182 route_name='strip_execute', request_method='POST',
1174 1183 renderer='json', xhr=True)
1175 1184
1176 1185 # Audit logs
1177 1186 config.add_route(
1178 1187 name='edit_repo_audit_logs',
1179 1188 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
1180 1189 config.add_view(
1181 1190 AuditLogsView,
1182 1191 attr='repo_audit_logs',
1183 1192 route_name='edit_repo_audit_logs', request_method='GET',
1184 1193 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1185 1194
1186 1195 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
1187 1196 config.add_route(
1188 1197 name='rss_feed_home',
1189 1198 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
1190 1199 config.add_view(
1191 1200 RepoFeedView,
1192 1201 attr='rss',
1193 1202 route_name='rss_feed_home', request_method='GET', renderer=None)
1194 1203
1195 1204 config.add_route(
1196 1205 name='rss_feed_home_old',
1197 1206 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
1198 1207 config.add_view(
1199 1208 RepoFeedView,
1200 1209 attr='rss',
1201 1210 route_name='rss_feed_home_old', request_method='GET', renderer=None)
1202 1211
1203 1212 config.add_route(
1204 1213 name='atom_feed_home',
1205 1214 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
1206 1215 config.add_view(
1207 1216 RepoFeedView,
1208 1217 attr='atom',
1209 1218 route_name='atom_feed_home', request_method='GET', renderer=None)
1210 1219
1211 1220 config.add_route(
1212 1221 name='atom_feed_home_old',
1213 1222 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
1214 1223 config.add_view(
1215 1224 RepoFeedView,
1216 1225 attr='atom',
1217 1226 route_name='atom_feed_home_old', request_method='GET', renderer=None)
1218 1227
1219 1228 # NOTE(marcink): needs to be at the end for catch-all
1220 1229 add_route_with_slash(
1221 1230 config,
1222 1231 name='repo_summary',
1223 1232 pattern='/{repo_name:.*?[^/]}', repo_route=True)
1224 1233 config.add_view(
1225 1234 RepoSummaryView,
1226 1235 attr='summary',
1227 1236 route_name='repo_summary', request_method='GET',
1228 1237 renderer='rhodecode:templates/summary/summary.mako')
1229 1238
1230 1239 # TODO(marcink): there's no such route??
1231 1240 config.add_view(
1232 1241 RepoSummaryView,
1233 1242 attr='summary',
1234 1243 route_name='repo_summary_slash', request_method='GET',
1235 1244 renderer='rhodecode:templates/summary/summary.mako') No newline at end of file
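The `branch_remove` route registered above is POST-only, CSRF-protected, and limited to hg and git repositories. A minimal sketch of how an authenticated HTTP client might drive it, assuming a hypothetical host, repository name, branch name, session cookie and CSRF token (none of these concrete values come from this changeset):

    import requests

    # All values below are placeholders; a real call needs an authenticated
    # RhodeCode session cookie and the csrf_token rendered into the branches page.
    base_url = "https://rhodecode.example.com"
    repo_name = "my-group/my-repo"
    branch_name = "feature/stale"

    session = requests.Session()
    session.cookies.set("rhodecode_session", "<session-cookie>")

    # Matches the pattern '/{repo_name}/{branch_name}/remove' added above.
    response = session.post(
        f"{base_url}/{repo_name}/{branch_name}/remove",
        data={"csrf_token": "<csrf-token>"},
        allow_redirects=False,
    )
    # The view answers with a 302 redirect back to the branches page on success.
    print(response.status_code)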
@@ -1,35 +1,83 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import pytest
20 20 from rhodecode.model.db import Repository
21 21 from rhodecode.tests.routes import route_path
22 from rhodecode.tests import assert_session_flash
22 23
23 24
24 25 @pytest.mark.usefixtures('autologin_user', 'app')
25 26 class TestBranchesController(object):
26 27
27 28 def test_index(self, backend):
28 29 response = self.app.get(
29 30 route_path('branches_home', repo_name=backend.repo_name))
30 31
31 32 repo = Repository.get_by_repo_name(backend.repo_name)
32 33
33 34 for commit_id, obj_name in repo.scm_instance().branches.items():
34 35 assert commit_id in response
35 36 assert obj_name in response
37
38 def test_landing_branch_delete(self, backend, csrf_token):
39 if backend.alias == 'svn':
40 pytest.skip("Not supported yet")
41 branch_related_data_per_backend = {
42 'git': {'name': 'master'},
43 'hg': {'name': 'default'},
44 }
45 response = self.app.post(
46 route_path('branch_remove', repo_name=backend.repo_name,
47 branch_name=branch_related_data_per_backend[backend.alias]['name']),
48 params={'csrf_token': csrf_token}, status=302)
49 assert_session_flash(
50 response,
51 f"This branch {branch_related_data_per_backend[backend.alias]['name']} cannot be removed as it's currently set as landing branch"
52 )
53
54 def test_delete_branch_by_repo_owner(self, backend, csrf_token):
55 if backend.alias in ('svn', 'hg'):
56 pytest.skip("Skipping for hg and svn")
57 branch_to_be_removed = 'remove_me'
58 repo = Repository.get_by_repo_name(backend.repo_name)
59 repo.scm_instance()._create_branch(branch_to_be_removed, repo.scm_instance().commit_ids[1])
60 response = self.app.post(
61 route_path('branch_remove', repo_name=backend.repo_name,
62 branch_name=branch_to_be_removed),
63 params={'csrf_token': csrf_token}, status=302)
64 assert_session_flash(response, f"Branch {branch_to_be_removed} has been successfully deleted")
65
66 def test_delete_branch_by_not_repo_owner(self, backend, csrf_token):
67 username = 'test_regular'
68 pwd = 'test12'
69 branch_related_data_per_backend = {
70 'git': {'name': 'master', 'action': 'deleted'},
71 'hg': {'name': 'stable', 'action': 'closed'},
72 }
73 if backend.alias == 'svn':
74 pytest.skip("Not supported yet")
75 self.app.post(route_path('login'),
76 {'username': username,
77 'password': pwd})
78 selected_branch = branch_related_data_per_backend[backend.alias]['name']
79 response = self.app.post(
80 route_path('branch_remove', repo_name=backend.repo_name,
81 branch_name=selected_branch),
82 params={'csrf_token': csrf_token, 'username': username, 'password': pwd}, status=404)
83 assert response.status_code == 404
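The tests above build URLs with the `route_path` test helper and pass the `csrf_token` fixture, mirroring what the view requires. A simplified, hypothetical stand-in for that helper is sketched below purely to show the URL shape the git case posts to; the real helper in rhodecode.tests.routes resolves against the registered Pyramid routes, and the repository name used here is only illustrative:

    # Hypothetical, simplified stand-in for the route_path test helper; it only
    # interpolates the two patterns relevant to the branch removal tests.
    def route_path(name, **parts):
        patterns = {
            'branches_home': '/{repo_name}/branches',
            'branch_remove': '/{repo_name}/{branch_name}/remove',
        }
        return patterns[name].format(**parts)

    # The git backend case ends up posting to a URL of this shape:
    assert route_path('branch_remove',
                      repo_name='vcs_test_git',
                      branch_name='remove_me') == '/vcs_test_git/remove_me/remove'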
@@ -1,47 +1,107 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 from pyramid.httpexceptions import HTTPFound
21 22
22 23 from rhodecode.apps._base import BaseReferencesView
23 24 from rhodecode.lib import ext_json
24 from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator)
25 from rhodecode.lib import helpers as h
26 from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
25 27 from rhodecode.model.scm import ScmModel
28 from rhodecode.model.meta import Session
29 from rhodecode.model.db import PullRequest
26 30
27 31 log = logging.getLogger(__name__)
28 32
29 33
30 34 class RepoBranchesView(BaseReferencesView):
31 35
32 36 @LoginRequired()
33 37 @HasRepoPermissionAnyDecorator(
34 38 'repository.read', 'repository.write', 'repository.admin')
35 39 def branches(self):
40 partial_render = self.request.get_partial_renderer(
41 'rhodecode:templates/data_table/_dt_elements.mako')
42 repo_name = self.db_repo_name
36 43 c = self.load_default_context()
37 44 self._prepare_and_set_clone_url(c)
38 45 c.rhodecode_repo = self.rhodecode_vcs_repo
39 46 c.repository_forks = ScmModel().get_forks(self.db_repo)
40
41 47 ref_items = self.rhodecode_vcs_repo.branches_all.items()
42 48 data = self.load_refs_context(
43 49 ref_items=ref_items, partials_template='branches/branches_data.mako')
44
50 data_with_actions = []
51 if self.db_repo.repo_type != 'svn':
52 for branch in data:
53 branch['action'] = partial_render(
54 f"branch_actions_{self.db_repo.repo_type}", branch['name_raw'], repo_name, closed=branch['closed']
55 )
56 data_with_actions.append(branch)
57 data = data_with_actions
45 58 c.has_references = bool(data)
46 59 c.data = ext_json.str_json(data)
47 60 return self._get_template_context(c)
61
62 @LoginRequired()
63 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
64 @CSRFRequired()
65 def remove_branch(self):
66 _ = self.request.translate
67 self.load_default_context()
68 repo = self.db_repo
69 repo_name = self.db_repo_name
70 repo_type = repo.repo_type
71 action = _('deleted') if repo_type == 'git' else _('closed')
72 redirect = HTTPFound(location=self.request.route_path('branches_home', repo_name=repo_name))
73 branch_name = self.request.matchdict.get('branch_name')
74 if repo.landing_ref_name == branch_name:
75 h.flash(
76 _("This branch {} cannot be removed as it's currently set as landing branch").format(branch_name),
77 category='error'
78 )
79 return redirect
80 if prs_related_to := Session().query(PullRequest).filter(PullRequest.target_repo_id == repo.repo_id,
81 PullRequest.status != PullRequest.STATUS_CLOSED).filter(
82 (PullRequest.source_ref.like(f'branch:{branch_name}:%')) | (
83 PullRequest.target_ref.like(f'branch:{branch_name}:%'))
84 ).all():
85 h.flash(_("Branch cannot be {} - it's used in the following open Pull Request ids: {}").format(action, ','.join(
86 map(str, prs_related_to))), category='error')
87 return redirect
88
89 match repo_type:
90 case 'git':
91 self.rhodecode_vcs_repo.delete_branch(branch_name)
92 case 'hg':
93 from rhodecode.lib.vcs.backends.base import Reference
94 self.rhodecode_vcs_repo._local_close(
95 source_ref=Reference(type='branch', name=branch_name,
96 commit_id=self.rhodecode_vcs_repo.branches[branch_name]),
97 target_ref=Reference(type='branch', name='', commit_id=None),
98 user_name=self.request.user.name,
99 user_email=self.request.user.email)
100 case _:
101 raise NotImplementedError('Branch deleting functionality not yet implemented')
102 ScmModel().mark_for_invalidation(repo_name)
103 self.rhodecode_vcs_repo._invalidate_prop_cache('commit_ids')
104 self.rhodecode_vcs_repo._invalidate_prop_cache('_refs')
105 self.rhodecode_vcs_repo._invalidate_prop_cache('branches')
106 h.flash(_("Branch {} has been successfully {}").format(branch_name, action), category='success')
107 return redirect
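The open pull request guard in `remove_branch` relies on pull request refs being stored as `branch:<branch_name>:<commit_id>` strings, which is what the `LIKE 'branch:{branch_name}:%'` filters on both source and target refs depend on to find open pull requests pinned to the branch. A standalone sketch of that same query, extracted for readability and assuming the `PullRequest` model and `Session` imported above:

    from rhodecode.model.db import PullRequest
    from rhodecode.model.meta import Session


    def open_prs_using_branch(repo_id, branch_name):
        # Refs look like 'branch:<name>:<commit_id>', so a prefix match on either
        # end of the pull request is enough to detect usage of the branch.
        ref_like = f'branch:{branch_name}:%'
        return Session().query(PullRequest).filter(
            PullRequest.target_repo_id == repo_id,
            PullRequest.status != PullRequest.STATUS_CLOSED,
        ).filter(
            PullRequest.source_ref.like(ref_like) |
            PullRequest.target_ref.like(ref_like)
        ).all()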
@@ -1,1053 +1,1056 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 GIT repository module
21 21 """
22 22
23 23 import logging
24 24 import os
25 25 import re
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from collections import OrderedDict
30 30 from rhodecode.lib.datelib import (
31 31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 32 from rhodecode.lib.hash_utils import safe_str
33 33 from rhodecode.lib.utils2 import CachedProperty
34 34 from rhodecode.lib.vcs import connection, path as vcspath
35 35 from rhodecode.lib.vcs.backends.base import (
36 36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 37 MergeFailureReason, Reference)
38 38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitDoesNotExistError, EmptyRepositoryError,
43 43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44 44
45 45
46 46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 class GitRepository(BaseRepository):
52 52 """
53 53 Git repository backend.
54 54 """
55 55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62
63 63 self.path = safe_str(os.path.abspath(repo_path))
64 64 self.config = config if config else self.get_default_config()
65 65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 66
67 67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 68
69 69 # caches
70 70 self._commit_ids = {}
71 71
72 72 @LazyProperty
73 73 def _remote(self):
74 74 repo_id = self.path
75 75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 76
77 77 @LazyProperty
78 78 def bare(self):
79 79 return self._remote.bare()
80 80
81 81 @LazyProperty
82 82 def head(self):
83 83 return self._remote.head()
84 84
85 85 @CachedProperty
86 86 def commit_ids(self):
87 87 """
88 88 Returns list of commit ids, in ascending order. Being a lazy
89 89 attribute allows external tools to inject commit ids from cache.
90 90 """
91 91 commit_ids = self._get_all_commit_ids()
92 92 self._rebuild_cache(commit_ids)
93 93 return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = {commit_id: index
97 97 for index, commit_id in enumerate(commit_ids)}
98 98
99 99 def run_git_command(self, cmd, **opts):
100 100 """
101 101 Runs given ``cmd`` as git command and returns tuple
102 102 (stdout, stderr).
103 103
104 104 :param cmd: git command to be executed
105 105 :param opts: env options to pass into Subprocess command
106 106 """
107 107 if not isinstance(cmd, list):
108 108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109 109
110 110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 111 out, err = self._remote.run_git_command(cmd, **opts)
112 112 if err and not skip_stderr_log:
113 113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 114 return out, err
115 115
116 116 @staticmethod
117 117 def check_url(url, config):
118 118 """
119 119 Function will check the given url and try to verify if it's a valid
120 120 link. Sometimes it may happen that git will issue a basic
121 121 auth request that can cause the whole API to hang when used from python
122 122 or other external calls.
123 123
124 124 On failures it'll raise urllib2.HTTPError; the exception is also raised
125 125 when the return code is not 200
126 126 """
127 127 # check first if it's not an url
128 128 if os.path.isdir(url) or url.startswith('file:'):
129 129 return True
130 130
131 131 if '+' in url.split('://', 1)[0]:
132 132 url = url.split('+', 1)[1]
133 133
134 134 # Request the _remote to verify the url
135 135 return connection.Git.check_url(url, config.serialize())
136 136
137 137 @staticmethod
138 138 def is_valid_repository(path):
139 139 if os.path.isdir(os.path.join(path, '.git')):
140 140 return True
141 141 # check case of bare repository
142 142 try:
143 143 GitRepository(path)
144 144 return True
145 145 except VCSError:
146 146 pass
147 147 return False
148 148
149 149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 150 bare=False):
151 151 if create and os.path.exists(self.path):
152 152 raise RepositoryError(
153 153 f"Cannot create repository at {self.path}, location already exist")
154 154
155 155 if bare and do_workspace_checkout:
156 156 raise RepositoryError("Cannot update a bare repository")
157 157 try:
158 158
159 159 if src_url:
160 160 # check URL before any actions
161 161 GitRepository.check_url(src_url, self.config)
162 162
163 163 if create:
164 164 if bare:
165 165 self._remote.init_bare()
166 166 else:
167 167 self._remote.init()
168 168
169 169 if src_url and bare:
170 170 # bare repository only allows a fetch and checkout is not allowed
171 171 self.fetch(src_url, commit_ids=None)
172 172 elif src_url:
173 173 self.pull(src_url, commit_ids=None,
174 174 update_after=do_workspace_checkout)
175 175
176 176 else:
177 177 if not self._remote.assert_correct_path():
178 178 raise RepositoryError(
179 179 f'Path "{self.path}" does not contain a Git repository')
180 180
181 181 # TODO: johbo: check if we have to translate the OSError here
182 182 except OSError as err:
183 183 raise RepositoryError(err)
184 184
185 185 def _get_all_commit_ids(self):
186 186 return self._remote.get_all_commit_ids()
187 187
188 188 def _get_commit_ids(self, filters=None):
189 189 # we must check if this repo is not empty, since later command
190 190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 191 # errors
192 192
193 193 head = self._remote.head(show_exc=False)
194 194
195 195 if not head:
196 196 return []
197 197
198 198 rev_filter = ['--branches', '--tags']
199 199 extra_filter = []
200 200
201 201 if filters:
202 202 if filters.get('since'):
203 203 extra_filter.append('--since=%s' % (filters['since']))
204 204 if filters.get('until'):
205 205 extra_filter.append('--until=%s' % (filters['until']))
206 206 if filters.get('branch_name'):
207 207 rev_filter = []
208 208 extra_filter.append(filters['branch_name'])
209 209 rev_filter.extend(extra_filter)
210 210
211 211 # if filters.get('start') or filters.get('end'):
212 212 # # skip is offset, max-count is limit
213 213 # if filters.get('start'):
214 214 # extra_filter += ' --skip=%s' % filters['start']
215 215 # if filters.get('end'):
216 216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
217 217
218 218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
219 219 try:
220 220 output, __ = self.run_git_command(cmd)
221 221 except RepositoryError:
222 222 # Can be raised for empty repositories
223 223 return []
224 224 return output.splitlines()
225 225
226 226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
227 227
228 228 def is_null(value):
229 229 return len(value) == commit_id_or_idx.count('0')
230 230
231 231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
232 232 return self.commit_ids[-1]
233 233
234 234 commit_missing_err = "Commit {} does not exist for `{}`".format(
235 235 *map(safe_str, [commit_id_or_idx, self.name]))
236 236
237 237 is_bstr = isinstance(commit_id_or_idx, str)
238 238 is_branch = reference_obj and reference_obj.branch
239 239
240 240 lookup_ok = False
241 241 if is_bstr:
242 242 # Need to call remote to translate id for tagging scenarios,
243 243 # or branches that are numeric
244 244 try:
245 245 remote_data = self._remote.get_object(commit_id_or_idx,
246 246 maybe_unreachable=maybe_unreachable)
247 247 commit_id_or_idx = remote_data["commit_id"]
248 248 lookup_ok = True
249 249 except (CommitDoesNotExistError,):
250 250 lookup_ok = False
251 251
252 252 if lookup_ok is False:
253 253 is_numeric_idx = \
254 254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
255 255 or isinstance(commit_id_or_idx, int)
256 256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
257 257 try:
258 258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
259 259 lookup_ok = True
260 260 except Exception:
261 261 raise CommitDoesNotExistError(commit_missing_err)
262 262
263 263 # we failed regular lookup, and by integer number lookup
264 264 if lookup_ok is False:
265 265 raise CommitDoesNotExistError(commit_missing_err)
266 266
267 267 # Ensure we return full id
268 268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 269 raise CommitDoesNotExistError(
270 270 "Given commit id %s not recognized" % commit_id_or_idx)
271 271 return commit_id_or_idx
272 272
273 273 def get_hook_location(self):
274 274 """
275 275 returns absolute path to location where hooks are stored
276 276 """
277 277 loc = os.path.join(self.path, 'hooks')
278 278 if not self.bare:
279 279 loc = os.path.join(self.path, '.git', 'hooks')
280 280 return loc
281 281
282 282 @LazyProperty
283 283 def last_change(self):
284 284 """
285 285 Returns last change made on this repository as
286 286 `datetime.datetime` object.
287 287 """
288 288 try:
289 289 return self.get_commit().date
290 290 except RepositoryError:
291 291 tzoffset = makedate()[1]
292 292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 293
294 294 def _get_fs_mtime(self):
295 295 idx_loc = '' if self.bare else '.git'
296 296 # fallback to filesystem
297 297 in_path = os.path.join(self.path, idx_loc, "index")
298 298 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 299 if os.path.exists(in_path):
300 300 return os.stat(in_path).st_mtime
301 301 else:
302 302 return os.stat(he_path).st_mtime
303 303
304 304 @LazyProperty
305 305 def description(self):
306 306 description = self._remote.get_description()
307 307 return safe_str(description or self.DEFAULT_DESCRIPTION)
308 308
309 309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 310 if self.is_empty():
311 311 return OrderedDict()
312 312
313 313 result = []
314 314 for ref, sha in self._refs.items():
315 315 if ref.startswith(prefix):
316 316 ref_name = ref
317 317 if strip_prefix:
318 318 ref_name = ref[len(prefix):]
319 319 result.append((safe_str(ref_name), sha))
320 320
321 321 def get_name(entry):
322 322 return entry[0]
323 323
324 324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 325
326 326 def _get_branches(self):
327 327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 328
329 def delete_branch(self, branch_name):
330 return self._remote.delete_branch(branch_name)
331
329 332 @CachedProperty
330 333 def branches(self):
331 334 return self._get_branches()
332 335
333 336 @CachedProperty
334 337 def branches_closed(self):
335 338 return {}
336 339
337 340 @CachedProperty
338 341 def bookmarks(self):
339 342 return {}
340 343
341 344 @CachedProperty
342 345 def branches_all(self):
343 346 all_branches = {}
344 347 all_branches.update(self.branches)
345 348 all_branches.update(self.branches_closed)
346 349 return all_branches
347 350
348 351 @CachedProperty
349 352 def tags(self):
350 353 return self._get_tags()
351 354
352 355 def _get_tags(self):
353 356 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
354 357
355 358 def tag(self, name, user, commit_id=None, message=None, date=None,
356 359 **kwargs):
357 360 # TODO: fix this method to apply annotated tags correct with message
358 361 """
359 362 Creates and returns a tag for the given ``commit_id``.
360 363
361 364 :param name: name for new tag
362 365 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 366 :param commit_id: commit id for which new tag would be created
364 367 :param message: message of the tag's commit
365 368 :param date: date of tag's commit
366 369
367 370 :raises TagAlreadyExistError: if tag with same name already exists
368 371 """
369 372 if name in self.tags:
370 373 raise TagAlreadyExistError("Tag %s already exists" % name)
371 374 commit = self.get_commit(commit_id=commit_id)
372 375 message = message or f"Added tag {name} for commit {commit.raw_id}"
373 376
374 377 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
375 378
376 379 self._invalidate_prop_cache('tags')
377 380 self._invalidate_prop_cache('_refs')
378 381
379 382 return commit
380 383
381 384 def remove_tag(self, name, user, message=None, date=None):
382 385 """
383 386 Removes tag with the given ``name``.
384 387
385 388 :param name: name of the tag to be removed
386 389 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 390 :param message: message of the tag's removal commit
388 391 :param date: date of tag's removal commit
389 392
390 393 :raises TagDoesNotExistError: if tag with given name does not exist
391 394 """
392 395 if name not in self.tags:
393 396 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 397
395 398 self._remote.tag_remove(name)
396 399 self._invalidate_prop_cache('tags')
397 400 self._invalidate_prop_cache('_refs')
398 401
399 402 def _get_refs(self):
400 403 return self._remote.get_refs()
401 404
402 405 @CachedProperty
403 406 def _refs(self):
404 407 return self._get_refs()
405 408
406 409 @property
407 410 def _ref_tree(self):
408 411 node = tree = {}
409 412 for ref, sha in self._refs.items():
410 413 path = ref.split('/')
411 414 for bit in path[:-1]:
412 415 node = node.setdefault(bit, {})
413 416 node[path[-1]] = sha
414 417 node = tree
415 418 return tree
416 419
417 420 def get_remote_ref(self, ref_name):
418 421 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
419 422 try:
420 423 return self._refs[ref_key]
421 424 except Exception:
422 425 return
423 426
424 427 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
425 428 translate_tag=True, maybe_unreachable=False, reference_obj=None):
426 429 """
427 430 Returns `GitCommit` object representing commit from git repository
428 431 at the given `commit_id` or head (most recent commit) if None given.
429 432 """
430 433
431 434 if self.is_empty():
432 435 raise EmptyRepositoryError("There are no commits yet")
433 436
434 437 if commit_id is not None:
435 438 self._validate_commit_id(commit_id)
436 439 try:
437 440 # we have cached idx, use it without contacting the remote
438 441 idx = self._commit_ids[commit_id]
439 442 return GitCommit(self, commit_id, idx, pre_load=pre_load)
440 443 except KeyError:
441 444 pass
442 445
443 446 elif commit_idx is not None:
444 447 self._validate_commit_idx(commit_idx)
445 448 try:
446 449 _commit_id = self.commit_ids[commit_idx]
447 450 if commit_idx < 0:
448 451 commit_idx = self.commit_ids.index(_commit_id)
449 452 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 453 except IndexError:
451 454 commit_id = commit_idx
452 455 else:
453 456 commit_id = "tip"
454 457
455 458 if translate_tag:
456 459 commit_id = self._lookup_commit(
457 460 commit_id, maybe_unreachable=maybe_unreachable,
458 461 reference_obj=reference_obj)
459 462
460 463 try:
461 464 idx = self._commit_ids[commit_id]
462 465 except KeyError:
463 466 idx = -1
464 467
465 468 return GitCommit(self, commit_id, idx, pre_load=pre_load)
466 469
467 470 def get_commits(
468 471 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 472 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
470 473 """
471 474 Returns generator of `GitCommit` objects from start to end (both
472 475 are inclusive), in ascending date order.
473 476
474 477 :param start_id: None, str(commit_id)
475 478 :param end_id: None, str(commit_id)
476 479 :param start_date: if specified, commits with commit date less than
477 480 ``start_date`` would be filtered out from returned set
478 481 :param end_date: if specified, commits with commit date greater than
479 482 ``end_date`` would be filtered out from returned set
480 483 :param branch_name: if specified, commits not reachable from given
481 484 branch would be filtered out from returned set
482 485 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 486 Mercurial evolve
484 487 :raise BranchDoesNotExistError: If given `branch_name` does not
485 488 exist.
486 489 :raise CommitDoesNotExistError: If commits for given `start` or
487 490 `end` could not be found.
488 491
489 492 """
490 493 if self.is_empty():
491 494 raise EmptyRepositoryError("There are no commits yet")
492 495
493 496 self._validate_branch_name(branch_name)
494 497
495 498 if start_id is not None:
496 499 self._validate_commit_id(start_id)
497 500 if end_id is not None:
498 501 self._validate_commit_id(end_id)
499 502
500 503 start_raw_id = self._lookup_commit(start_id)
501 504 start_pos = self._commit_ids[start_raw_id] if start_id else None
502 505 end_raw_id = self._lookup_commit(end_id)
503 506 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
504 507
505 508 if None not in [start_id, end_id] and start_pos > end_pos:
506 509 raise RepositoryError(
507 510 "Start commit '%s' cannot be after end commit '%s'" %
508 511 (start_id, end_id))
509 512
510 513 if end_pos is not None:
511 514 end_pos += 1
512 515
513 516 filter_ = []
514 517 if branch_name:
515 518 filter_.append({'branch_name': branch_name})
516 519 if start_date and not end_date:
517 520 filter_.append({'since': start_date})
518 521 if end_date and not start_date:
519 522 filter_.append({'until': end_date})
520 523 if start_date and end_date:
521 524 filter_.append({'since': start_date})
522 525 filter_.append({'until': end_date})
523 526
524 527 # if start_pos or end_pos:
525 528 # filter_.append({'start': start_pos})
526 529 # filter_.append({'end': end_pos})
527 530
528 531 if filter_:
529 532 revfilters = {
530 533 'branch_name': branch_name,
531 534 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
532 535 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
533 536 'start': start_pos,
534 537 'end': end_pos,
535 538 }
536 539 commit_ids = self._get_commit_ids(filters=revfilters)
537 540
538 541 else:
539 542 commit_ids = self.commit_ids
540 543
541 544 if start_pos or end_pos:
542 545 commit_ids = commit_ids[start_pos: end_pos]
543 546
544 547 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
545 548 translate_tag=translate_tags)
546 549
547 550 def get_diff(
548 551 self, commit1, commit2, path='', ignore_whitespace=False,
549 552 context=3, path1=None):
550 553 """
551 554 Returns (git like) *diff*, as plain text. Shows changes introduced by
552 555 ``commit2`` since ``commit1``.
553 556
554 557 :param commit1: Entry point from which diff is shown. Can be
555 558 ``self.EMPTY_COMMIT`` - in this case, patch showing all
556 559 the changes since empty state of the repository until ``commit2``
557 560 :param commit2: Until which commits changes should be shown.
558 561 :param path:
559 562 :param ignore_whitespace: If set to ``True``, would not show whitespace
560 563 changes. Defaults to ``False``.
561 564 :param context: How many lines before/after changed lines should be
562 565 shown. Defaults to ``3``.
563 566 :param path1:
564 567 """
565 568 self._validate_diff_commits(commit1, commit2)
566 569 if path1 is not None and path1 != path:
567 570 raise ValueError("Diff of two different paths not supported.")
568 571
569 572 if path:
570 573 file_filter = path
571 574 else:
572 575 file_filter = None
573 576
574 577 diff = self._remote.diff(
575 578 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
576 579 opt_ignorews=ignore_whitespace,
577 580 context=context)
578 581
579 582 return GitDiff(diff)
580 583
581 584 def strip(self, commit_id, branch_name):
582 585 commit = self.get_commit(commit_id=commit_id)
583 586 if commit.merge:
584 587 raise Exception('Cannot reset to merge commit')
585 588
586 589 if not branch_name:
587 590 raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')
588 591
589 592 # parent is going to be the new head now
590 593 commit = commit.parents[0]
591 594 self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)
592 595
593 596 # clear cached properties
594 597 self._invalidate_prop_cache('commit_ids')
595 598 self._invalidate_prop_cache('_refs')
596 599 self._invalidate_prop_cache('branches')
597 600
598 601 return len(self.commit_ids)
599 602
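Reviewer annotation: what strip() does from the caller's point of view (commit id and branch name are illustrative only).

# move 'feature-x' back by one commit: the given non-merge head is dropped,
# the branch ref now points at its first parent, and the new commit count is returned
remaining = repo.strip(commit_id='cccccccccccc', branch_name='feature-x')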
600 603 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 604 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 605 self, commit_id1, repo2, commit_id2)
603 606
604 607 if commit_id1 == commit_id2:
605 608 return commit_id1
606 609
607 610 if self != repo2:
608 611 commits = self._remote.get_missing_revs(
609 612 commit_id1, commit_id2, repo2.path)
610 613 if commits:
611 614 commit = repo2.get_commit(commits[-1])
612 615 if commit.parents:
613 616 ancestor_id = commit.parents[0].raw_id
614 617 else:
615 618 ancestor_id = None
616 619 else:
617 620 # no commits from other repo, ancestor_id is the commit_id2
618 621 ancestor_id = commit_id2
619 622 else:
620 623 output, __ = self.run_git_command(
621 624 ['merge-base', commit_id1, commit_id2])
622 625 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623 626
624 627 log.debug('Found common ancestor with sha: %s', ancestor_id)
625 628
626 629 return ancestor_id
627 630
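Reviewer annotation: the same-repository code path above reduces to `git merge-base`; a stand-alone sketch of that call, assuming a plain git CLI is available.

import subprocess

def merge_base(repo_path, sha_a, sha_b):
    # sha of the best common ancestor of the two commits
    out = subprocess.check_output(
        ['git', '-C', repo_path, 'merge-base', sha_a, sha_b])
    return out.decode().strip()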
628 631 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 632 repo1 = self
630 633 ancestor_id = None
631 634
632 635 if commit_id1 == commit_id2:
633 636 commits = []
634 637 elif repo1 != repo2:
635 638 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 639 repo2.path)
637 640 commits = [
638 641 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 642 for commit_id in reversed(missing_ids)]
640 643 else:
641 644 output, __ = repo1.run_git_command(
642 645 ['log', '--reverse', '--pretty=format: %H', '-s',
643 646 f'{commit_id1}..{commit_id2}'])
644 647 commits = [
645 648 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 649 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647 650
648 651 return commits
649 652
650 653 @LazyProperty
651 654 def in_memory_commit(self):
652 655 """
653 656 Returns ``GitInMemoryCommit`` object for this repository.
654 657 """
655 658 return GitInMemoryCommit(self)
656 659
657 660 def pull(self, url, commit_ids=None, update_after=False):
658 661 """
659 662 Pull changes from external location. In GIT, pull differs from fetch
660 663 since it also performs a checkout
661 664
662 665 :param commit_ids: Optional. Can be set to a list of commit ids
663 666 which shall be pulled from the other repository.
664 667 """
665 668 refs = None
666 669 if commit_ids is not None:
667 670 remote_refs = self._remote.get_remote_refs(url)
668 671 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 672 self._remote.pull(url, refs=refs, update_after=update_after)
670 673 self._remote.invalidate_vcs_cache()
671 674
672 675 def fetch(self, url, commit_ids=None, **kwargs):
673 676 """
674 677 Fetch all git objects from external location.
675 678 """
676 679 self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
677 680 self._remote.invalidate_vcs_cache()
678 681
679 682 def push(self, url, **kwargs):
680 683 refs = None
681 684 self._remote.sync_push(url, refs=refs, **kwargs)
682 685
683 686 def set_refs(self, ref_name, commit_id):
684 687 self._remote.set_refs(ref_name, commit_id)
685 688 self._invalidate_prop_cache('_refs')
686 689
687 690 def remove_ref(self, ref_name):
688 691 self._remote.remove_ref(ref_name)
689 692 self._invalidate_prop_cache('_refs')
690 693
691 694 def run_gc(self, prune=True):
692 695 cmd = ['gc', '--aggressive']
693 696 if prune:
694 697 cmd += ['--prune=now']
695 698 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 699 return stderr
697 700
698 701 def _update_server_info(self, force=False):
699 702 """
700 703 runs git's update-server-info command in this repo instance
701 704 """
702 705 self._remote.update_server_info(force=force)
703 706
704 707 def _current_branch(self):
705 708 """
706 709 Return the name of the current branch.
707 710
708 711 It only works for non-bare repositories (i.e. repositories with a
709 712 working copy)
710 713 """
711 714 if self.bare:
712 715 raise RepositoryError('Bare git repos do not have active branches')
713 716
714 717 if self.is_empty():
715 718 return None
716 719
717 720 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 721 return stdout.strip()
719 722
720 723 def _checkout(self, branch_name, create=False, force=False):
721 724 """
722 725 Checkout a branch in the working directory.
723 726
724 727 It tries to create the branch if create is True, failing if the branch
725 728 already exists.
726 729
727 730 It only works for non-bare repositories (i.e. repositories with a
728 731 working copy)
729 732 """
730 733 if self.bare:
731 734 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 735
733 736 cmd = ['checkout']
734 737 if force:
735 738 cmd.append('-f')
736 739 if create:
737 740 cmd.append('-b')
738 741 cmd.append(branch_name)
739 742 self.run_git_command(cmd, fail_on_stderr=False)
740 743
741 744 def _create_branch(self, branch_name, commit_id):
742 745 """
743 746 creates a branch in a GIT repo
744 747 """
745 748 self._remote.create_branch(branch_name, commit_id)
746 749
747 750 def _identify(self):
748 751 """
749 752 Return the current state of the working directory.
750 753 """
751 754 if self.bare:
752 755 raise RepositoryError('Bare git repos do not have active branches')
753 756
754 757 if self.is_empty():
755 758 return None
756 759
757 760 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 761 return stdout.strip()
759 762
760 763 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 764 """
762 765 Create a local clone of the current repo.
763 766 """
764 767 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 768 # clone will only fetch the active branch.
766 769 cmd = ['clone', '--branch', branch_name,
767 770 self.path, os.path.abspath(clone_path)]
768 771
769 772 self.run_git_command(cmd, fail_on_stderr=False)
770 773
771 774 # if we get a different source branch, make sure we also fetch it for
772 775 # merge conditions
773 776 if source_branch and source_branch != branch_name:
774 777 # check if the ref exists.
775 778 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 779 if shadow_repo.get_remote_ref(source_branch):
777 780 cmd = ['fetch', self.path, source_branch]
778 781 self.run_git_command(cmd, fail_on_stderr=False)
779 782
780 783 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 784 """
782 785 Fetch a branch from a local repository.
783 786 """
784 787 repository_path = os.path.abspath(repository_path)
785 788 if repository_path == self.path:
786 789 raise ValueError('Cannot fetch from the same repository')
787 790
788 791 if use_origin:
789 792 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 793 branch=branch_name)
791 794
792 795 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 796 repository_path, branch_name]
794 797 self.run_git_command(cmd, fail_on_stderr=False)
795 798
796 799 def _local_reset(self, branch_name):
797 800 branch_name = f'{branch_name}'
798 801 cmd = ['reset', '--hard', branch_name, '--']
799 802 self.run_git_command(cmd, fail_on_stderr=False)
800 803
801 804 def _last_fetch_heads(self):
802 805 """
803 806 Return the last fetched heads that need merging.
804 807
805 808 The algorithm is defined at
806 809 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 810 """
808 811 if not self.bare:
809 812 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 813 else:
811 814 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812 815
813 816 heads = []
814 817 with open(fetch_heads_path) as f:
815 818 for line in f:
816 819 if ' not-for-merge ' in line:
817 820 continue
818 821 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 822 heads.append(line)
820 823
821 824 return heads
822 825
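Reviewer annotation: a stand-alone sketch of the same filtering, using made-up FETCH_HEAD-style lines (layout here is illustrative, not authoritative).

import re

sample = [
    "9fceb02d0ae5\t\tbranch 'master' of /srv/source-repo\n",
    "3ac7d12c8b71\t not-for-merge \tbranch 'feature' of /srv/source-repo\n",
]
heads = []
for line in sample:
    if ' not-for-merge ' in line:
        continue  # only heads that need merging are kept
    heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
# heads == ['9fceb02d0ae5']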
823 826 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 827 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825 828
826 829 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 830 """
828 831 Pull a branch from a local repository.
829 832 """
830 833 if self.bare:
831 834 raise RepositoryError('Cannot pull into a bare git repository')
832 835 # N.B.(skreft): The --ff-only option is to make sure this is a
833 836 # fast-forward (i.e., we are only pulling new changes and there are no
834 837 # conflicts with our current branch)
835 838 # Additionally, that option needs to go before --no-tags, otherwise git
836 839 # pull complains about it being an unknown flag.
837 840 cmd = ['pull']
838 841 if ff_only:
839 842 cmd.append('--ff-only')
840 843 cmd.extend(['--no-tags', repository_path, branch_name])
841 844 self.run_git_command(cmd, fail_on_stderr=False)
842 845
843 846 def _local_merge(self, merge_message, user_name, user_email, heads):
844 847 """
845 848 Merge the given head into the checked out branch.
846 849
847 850 It will force a merge commit.
848 851
849 852 Currently it raises an error if the repo is empty, as it is not possible
850 853 to create a merge commit in an empty repo.
851 854
852 855 :param merge_message: The message to use for the merge commit.
853 856 :param heads: the heads to merge.
854 857 """
855 858 if self.bare:
856 859 raise RepositoryError('Cannot merge into a bare git repository')
857 860
858 861 if not heads:
859 862 return
860 863
861 864 if self.is_empty():
862 865 # TODO(skreft): do something more robust in this case.
863 866 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 867 unresolved = None
865 868
866 869 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 870 # merge commit. We also specify the user who is doing the merge.
868 871 cmd = ['-c', f'user.name="{user_name}"',
869 872 '-c', f'user.email={user_email}',
870 873 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 874
872 875 merge_cmd = cmd + heads
873 876
874 877 try:
875 878 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 879 except RepositoryError:
877 880 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 881 fail_on_stderr=False)[0].splitlines()
879 882 # NOTE(marcink): we add U notation to be consistent with HG backend output
880 883 unresolved = [f'U {f}' for f in files]
881 884
882 885 # Cleanup any merge leftovers
883 886 self._remote.invalidate_vcs_cache()
884 887 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885 888
886 889 if unresolved:
887 890 raise UnresolvedFilesInRepo(unresolved)
888 891 else:
889 892 raise
890 893
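Reviewer annotation: a small illustration of the conflict reporting format used above (file names are made up); it mirrors the 'U <path>' notation of the HG backend.

files = ['docs/index.rst', 'setup.py']   # as produced by `git diff --name-only --diff-filter U`
unresolved = [f'U {f}' for f in files]   # ['U docs/index.rst', 'U setup.py']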
891 894 def _local_push(
892 895 self, source_branch, repository_path, target_branch,
893 896 enable_hooks=False, rc_scm_data=None):
894 897 """
895 898 Push the source_branch to the given repository and target_branch.
896 899
897 900 Currently, if the target_branch is not master and the target repo is
898 901 empty, the push will work, but then GitRepository won't be able to find
899 902 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
900 903 pointing to master, which does not exist).
901 904
902 905 It does not run the hooks in the target repo.
903 906 """
904 907 # TODO(skreft): deal with the case in which the target repo is empty,
905 908 # and the target_branch is not master.
906 909 target_repo = GitRepository(repository_path)
907 910 if (not target_repo.bare and
908 911 target_repo._current_branch() == target_branch):
909 912 # Git prevents pushing to the checked out branch, so simulate it by
910 913 # pulling into the target repository.
911 914 target_repo._local_pull(self.path, source_branch)
912 915 else:
913 916 cmd = ['push', os.path.abspath(repository_path),
914 917 f'{source_branch}:{target_branch}']
915 918 gitenv = {}
916 919 if rc_scm_data:
917 920 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918 921
919 922 if not enable_hooks:
920 923 gitenv['RC_SKIP_HOOKS'] = '1'
921 924 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922 925
923 926 def _get_new_pr_branch(self, source_branch, target_branch):
924 927 prefix = f'pr_{source_branch}-{target_branch}_'
925 928 pr_branches = []
926 929 for branch in self.branches:
927 930 if branch.startswith(prefix):
928 931 pr_branches.append(int(branch[len(prefix):]))
929 932
930 933 if not pr_branches:
931 934 branch_id = 0
932 935 else:
933 936 branch_id = max(pr_branches) + 1
934 937
935 938 return '%s%d' % (prefix, branch_id)
936 939
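Reviewer annotation: the naming scheme above, spelled out with illustrative branch names.

source, target = 'feature', 'master'
prefix = f'pr_{source}-{target}_'                          # -> 'pr_feature-master_'
existing = ['pr_feature-master_0', 'pr_feature-master_1']  # already present in the shadow repo
next_id = max(int(b[len(prefix):]) for b in existing) + 1
new_branch = '%s%d' % (prefix, next_id)                    # -> 'pr_feature-master_2'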
937 940 def _maybe_prepare_merge_workspace(
938 941 self, repo_id, workspace_id, target_ref, source_ref):
939 942 shadow_repository_path = self._get_shadow_repository_path(
940 943 self.path, repo_id, workspace_id)
941 944 if not os.path.exists(shadow_repository_path):
942 945 self._local_clone(
943 946 shadow_repository_path, target_ref.name, source_ref.name)
944 947 log.debug('Prepared %s shadow repository in %s',
945 948 self.alias, shadow_repository_path)
946 949
947 950 return shadow_repository_path
948 951
949 952 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 953 source_repo, source_ref, merge_message,
951 954 merger_name, merger_email, dry_run=False,
952 955 use_rebase=False, close_branch=False):
953 956
954 957 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 958 'rebase' if use_rebase else 'merge', dry_run)
956 959
957 960 if target_ref.commit_id != self.branches[target_ref.name]:
958 961 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
959 962 target_ref.commit_id, self.branches[target_ref.name])
960 963 return MergeResponse(
961 964 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 965 metadata={'target_ref': target_ref})
963 966
964 967 shadow_repository_path = self._maybe_prepare_merge_workspace(
965 968 repo_id, workspace_id, target_ref, source_ref)
966 969 shadow_repo = self.get_shadow_instance(shadow_repository_path)
967 970
968 971 # checkout source, if it's different. Otherwise we could not
969 972 # fetch proper commits for merge testing
970 973 if source_ref.name != target_ref.name:
971 974 if shadow_repo.get_remote_ref(source_ref.name):
972 975 shadow_repo._checkout(source_ref.name, force=True)
973 976
974 977 # checkout target, and fetch changes
975 978 shadow_repo._checkout(target_ref.name, force=True)
976 979
977 980 # fetch/reset pull the target, in case it is changed
978 981 # this handles even force changes
979 982 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
980 983 shadow_repo._local_reset(target_ref.name)
981 984
982 985 # Need to reload repo to invalidate the cache, or otherwise we cannot
983 986 # retrieve the last target commit.
984 987 shadow_repo = self.get_shadow_instance(shadow_repository_path)
985 988 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
986 989 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
987 990 target_ref, target_ref.commit_id,
988 991 shadow_repo.branches[target_ref.name])
989 992 return MergeResponse(
990 993 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
991 994 metadata={'target_ref': target_ref})
992 995
993 996 # calculate new branch
994 997 pr_branch = shadow_repo._get_new_pr_branch(
995 998 source_ref.name, target_ref.name)
996 999 log.debug('using pull-request merge branch: `%s`', pr_branch)
997 1000 # checkout to temp branch, and fetch changes
998 1001 shadow_repo._checkout(pr_branch, create=True)
999 1002 try:
1000 1003 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1001 1004 except RepositoryError:
1002 1005 log.exception('Failure when doing local fetch on '
1003 1006 'shadow repo: %s', shadow_repo)
1004 1007 return MergeResponse(
1005 1008 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1006 1009 metadata={'source_ref': source_ref})
1007 1010
1008 1011 merge_ref = None
1009 1012 merge_failure_reason = MergeFailureReason.NONE
1010 1013 metadata = {}
1011 1014 try:
1012 1015 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1013 1016 [source_ref.commit_id])
1014 1017 merge_possible = True
1015 1018
1016 1019 # Need to invalidate the cache, or otherwise we
1017 1020 # cannot retrieve the merge commit.
1018 1021 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1019 1022 merge_commit_id = shadow_repo.branches[pr_branch]
1020 1023
1021 1024 # Set a reference pointing to the merge commit. This reference may
1022 1025 # be used to easily identify the last successful merge commit in
1023 1026 # the shadow repository.
1024 1027 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1025 1028 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1026 1029 except RepositoryError as e:
1027 1030 log.exception('Failure when doing local merge on git shadow repo')
1028 1031 if isinstance(e, UnresolvedFilesInRepo):
1029 1032 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1030 1033
1031 1034 merge_possible = False
1032 1035 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1033 1036
1034 1037 if merge_possible and not dry_run:
1035 1038 try:
1036 1039 shadow_repo._local_push(
1037 1040 pr_branch, self.path, target_ref.name, enable_hooks=True,
1038 1041 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1039 1042 merge_succeeded = True
1040 1043 except RepositoryError:
1041 1044 log.exception(
1042 1045 'Failure when doing local push from the shadow '
1043 1046 'repository to the target repository at %s.', self.path)
1044 1047 merge_succeeded = False
1045 1048 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1046 1049 metadata['target'] = 'git shadow repo'
1047 1050 metadata['merge_commit'] = pr_branch
1048 1051 else:
1049 1052 merge_succeeded = False
1050 1053
1051 1054 return MergeResponse(
1052 1055 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1053 1056 metadata=metadata)
@@ -1,6037 +1,6043 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Database Models for RhodeCode Enterprise
21 21 """
22 22
23 23 import re
24 24 import os
25 25 import time
26 26 import string
27 27 import logging
28 28 import datetime
29 29 import uuid
30 30 import warnings
31 31 import ipaddress
32 32 import functools
33 33 import traceback
34 34 import collections
35 35
36 36 import pyotp
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, cast, TypeDecorator, event, select,
39 39 true, false, null, union_all,
40 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 42 Text, Float, PickleType, BigInteger)
43 43 from sqlalchemy.sql.expression import case
44 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 45 from sqlalchemy.orm import (
46 46 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
47 47 from sqlalchemy.ext.declarative import declared_attr
48 48 from sqlalchemy.ext.hybrid import hybrid_property
49 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 50 from sqlalchemy.dialects.mysql import LONGTEXT
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52 from pyramid.threadlocal import get_current_request
53 53 from webhelpers2.text import remove_formatting
54 54
55 55 from rhodecode import ConfigGet
56 56 from rhodecode.lib.str_utils import safe_bytes
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import (
60 60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 61 from rhodecode.lib.utils2 import (
62 62 str2bool, safe_str, get_commit_safe, sha1_safe,
63 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
65 65 from rhodecode.lib.jsonalchemy import (
66 66 MutationObj, MutationList, JsonType, JsonRaw)
67 67 from rhodecode.lib.hash_utils import sha1
68 68 from rhodecode.lib import ext_json
69 69 from rhodecode.lib import enc_utils
70 70 from rhodecode.lib.ext_json import json, str_json
71 71 from rhodecode.lib.caching_query import FromCache
72 72 from rhodecode.lib.exceptions import (
73 73 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
74 74 from rhodecode.model.meta import Base, Session
75 75
76 76 URL_SEP = '/'
77 77 log = logging.getLogger(__name__)
78 78
79 79 # =============================================================================
80 80 # BASE CLASSES
81 81 # =============================================================================
82 82
83 83 # this is propagated from .ini file rhodecode.encrypted_values.secret or
84 84 # beaker.session.secret if first is not set.
85 85 # and initialized at environment.py
86 86 ENCRYPTION_KEY: bytes = b''
87 87
88 88 # used to sort permissions by types, '#' used here is not allowed to be in
89 89 # usernames, and it's very early in sorted string.printable table.
90 90 PERMISSION_TYPE_SORT = {
91 91 'admin': '####',
92 92 'write': '###',
93 93 'read': '##',
94 94 'none': '#',
95 95 }
96 96
97 97
98 98 def display_user_sort(obj):
99 99 """
100 100 Sort function used to sort permissions in .permissions() function of
101 101 Repository, RepoGroup, UserGroup. Also it puts the default user in front
102 102 of all other resources
103 103 """
104 104
105 105 if obj.username == User.DEFAULT_USER:
106 106 return '#####'
107 107 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
108 108 extra_sort_num = '1' # default
109 109
110 110 # NOTE(dan): inactive duplicates goes last
111 111 if getattr(obj, 'duplicate_perm', None):
112 112 extra_sort_num = '9'
113 113 return prefix + extra_sort_num + obj.username
114 114
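Reviewer annotation: the generated keys sort as intended because '#' compares lower than any digit or letter; a small illustration with hypothetical usernames.

keys = [
    '#####',        # the default user, always first
    '####1alice',   # admin
    '###1bob',      # write
    '##1carol',     # read
    '#1dave',       # none
    '#9dave',       # inactive duplicate of a 'none' perm, pushed to the back
]
assert sorted(keys) == keys   # lexicographic order already matches the intent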
115 115
116 116 def display_user_group_sort(obj):
117 117 """
118 118 Sort function used to sort permissions in .permissions() function of
119 119 Repository, RepoGroup, UserGroup. Also it puts the default user in front
120 120 of all other resources
121 121 """
122 122
123 123 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
124 124 return prefix + obj.users_group_name
125 125
126 126
127 127 def _hash_key(k):
128 128 return sha1_safe(k)
129 129
130 130
131 131 def in_filter_generator(qry, items, limit=500):
132 132 """
133 133 Splits a large IN() clause into multiple smaller ones combined with OR
134 134 e.g.::
135 135 cnt = Repository.query().filter(
136 136 or_(
137 137 *in_filter_generator(Repository.repo_id, range(100000))
138 138 )).count()
139 139 """
140 140 if not items:
141 141 # empty list will cause empty query which might cause security issues
142 142 # this can lead to hidden unpleasant results
143 143 items = [-1]
144 144
145 145 parts = []
146 146 for chunk in range(0, len(items), limit):
147 147 parts.append(
148 148 qry.in_(items[chunk: chunk + limit])
149 149 )
150 150
151 151 return parts
152 152
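Reviewer annotation: a sketch of the chunking effect with a deliberately tiny limit (the numbers are illustrative).

# five ids with limit=2 yield three IN() clauses that the caller OR-s together:
#   repo_id IN (1, 2) OR repo_id IN (3, 4) OR repo_id IN (5)
parts = in_filter_generator(Repository.repo_id, [1, 2, 3, 4, 5], limit=2)
assert len(parts) == 3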
153 153
154 154 base_table_args = {
155 155 'extend_existing': True,
156 156 'mysql_engine': 'InnoDB',
157 157 'mysql_charset': 'utf8',
158 158 'sqlite_autoincrement': True
159 159 }
160 160
161 161
162 162 class EncryptedTextValue(TypeDecorator):
163 163 """
164 164 Special column for encrypted long text data, use like::
165 165
166 166 value = Column("encrypted_value", EncryptedValue(), nullable=False)
167 167
168 168 This column is intelligent so if value is in unencrypted form it returns the
169 169 unencrypted form, but on save it always encrypts
170 170 """
171 171 cache_ok = True
172 172 impl = Text
173 173
174 174 def process_bind_param(self, value, dialect):
175 175 """
176 176 Setter for storing value
177 177 """
178 178 import rhodecode
179 179 if not value:
180 180 return value
181 181
182 182 # protect against double encrypting if values is already encrypted
183 183 if value.startswith('enc$aes$') \
184 184 or value.startswith('enc$aes_hmac$') \
185 185 or value.startswith('enc2$'):
186 186 raise ValueError('value needs to be in unencrypted format, '
187 187 'ie. not starting with enc$ or enc2$')
188 188
189 189 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
190 190 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
191 191 return safe_str(bytes_val)
192 192
193 193 def process_result_value(self, value, dialect):
194 194 """
195 195 Getter for retrieving value
196 196 """
197 197
198 198 import rhodecode
199 199 if not value:
200 200 return value
201 201
202 202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY)
203 203
204 204 return safe_str(bytes_val)
205 205
206 206
207 207 class BaseModel(object):
208 208 """
209 209 Base Model for all classes
210 210 """
211 211
212 212 @classmethod
213 213 def _get_keys(cls):
214 214 """return column names for this model """
215 215 return class_mapper(cls).c.keys()
216 216
217 217 def get_dict(self):
218 218 """
219 219 return dict with keys and values corresponding
220 220 to this model data """
221 221
222 222 d = {}
223 223 for k in self._get_keys():
224 224 d[k] = getattr(self, k)
225 225
226 226 # also use __json__() if present to get additional fields
227 227 _json_attr = getattr(self, '__json__', None)
228 228 if _json_attr:
229 229 # update with attributes from __json__
230 230 if callable(_json_attr):
231 231 _json_attr = _json_attr()
232 232 for k, val in _json_attr.items():
233 233 d[k] = val
234 234 return d
235 235
236 236 def get_appstruct(self):
237 237 """return list with keys and values tuples corresponding
238 238 to this model data """
239 239
240 240 lst = []
241 241 for k in self._get_keys():
242 242 lst.append((k, getattr(self, k),))
243 243 return lst
244 244
245 245 def populate_obj(self, populate_dict):
246 246 """populate model with data from given populate_dict"""
247 247
248 248 for k in self._get_keys():
249 249 if k in populate_dict:
250 250 setattr(self, k, populate_dict[k])
251 251
252 252 @classmethod
253 253 def query(cls):
254 254 return Session().query(cls)
255 255
256 256 @classmethod
257 257 def select(cls, custom_cls=None):
258 258 """
259 259 stmt = cls.select().where(cls.user_id==1)
260 260 # optionally
261 261 stmt = cls.select(User.user_id).where(cls.user_id==1)
262 262 result = cls.execute(stmt) | cls.scalars(stmt)
263 263 """
264 264
265 265 if custom_cls:
266 266 stmt = select(custom_cls)
267 267 else:
268 268 stmt = select(cls)
269 269 return stmt
270 270
271 271 @classmethod
272 272 def execute(cls, stmt):
273 273 return Session().execute(stmt)
274 274
275 275 @classmethod
276 276 def scalars(cls, stmt):
277 277 return Session().scalars(stmt)
278 278
279 279 @classmethod
280 280 def get(cls, id_):
281 281 if id_:
282 282 return cls.query().get(id_)
283 283
284 284 @classmethod
285 285 def get_or_404(cls, id_):
286 286 from pyramid.httpexceptions import HTTPNotFound
287 287
288 288 try:
289 289 id_ = int(id_)
290 290 except (TypeError, ValueError):
291 291 raise HTTPNotFound()
292 292
293 293 res = cls.query().get(id_)
294 294 if not res:
295 295 raise HTTPNotFound()
296 296 return res
297 297
298 298 @classmethod
299 299 def getAll(cls):
300 300 # deprecated and left for backward compatibility
301 301 return cls.get_all()
302 302
303 303 @classmethod
304 304 def get_all(cls):
305 305 return cls.query().all()
306 306
307 307 @classmethod
308 308 def delete(cls, id_):
309 309 obj = cls.query().get(id_)
310 310 Session().delete(obj)
311 311
312 312 @classmethod
313 313 def identity_cache(cls, session, attr_name, value):
314 314 exist_in_session = []
315 315 for (item_cls, pkey), instance in session.identity_map.items():
316 316 if cls == item_cls and getattr(instance, attr_name) == value:
317 317 exist_in_session.append(instance)
318 318 if exist_in_session:
319 319 if len(exist_in_session) == 1:
320 320 return exist_in_session[0]
321 321 log.exception(
322 322 'multiple objects with attr %s and '
323 323 'value %s found with same name: %r',
324 324 attr_name, value, exist_in_session)
325 325
326 326 @property
327 327 def cls_name(self):
328 328 return self.__class__.__name__
329 329
330 330 def __repr__(self):
331 331 return f'<DB:{self.cls_name}>'
332 332
333 333
334 334 class RhodeCodeSetting(Base, BaseModel):
335 335 __tablename__ = 'rhodecode_settings'
336 336 __table_args__ = (
337 337 UniqueConstraint('app_settings_name'),
338 338 base_table_args
339 339 )
340 340
341 341 SETTINGS_TYPES = {
342 342 'str': safe_str,
343 343 'int': safe_int,
344 344 'unicode': safe_str,
345 345 'bool': str2bool,
346 346 'list': functools.partial(aslist, sep=',')
347 347 }
348 348 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
349 349 GLOBAL_CONF_KEY = 'app_settings'
350 350
351 351 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
352 352 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
353 353 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
354 354 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
355 355
356 356 def __init__(self, key='', val='', type='unicode'):
357 357 self.app_settings_name = key
358 358 self.app_settings_type = type
359 359 self.app_settings_value = val
360 360
361 361 @validates('_app_settings_value')
362 362 def validate_settings_value(self, key, val):
363 363 assert type(val) == str
364 364 return val
365 365
366 366 @hybrid_property
367 367 def app_settings_value(self):
368 368 v = self._app_settings_value
369 369 _type = self.app_settings_type
370 370 if _type:
371 371 _type = self.app_settings_type.split('.')[0]
372 372 # decode the encrypted value
373 373 if 'encrypted' in self.app_settings_type:
374 374 cipher = EncryptedTextValue()
375 375 v = safe_str(cipher.process_result_value(v, None))
376 376
377 377 converter = self.SETTINGS_TYPES.get(_type) or \
378 378 self.SETTINGS_TYPES['unicode']
379 379 return converter(v)
380 380
381 381 @app_settings_value.setter
382 382 def app_settings_value(self, val):
383 383 """
384 384 Setter that will always make sure we use unicode in app_settings_value
385 385
386 386 :param val:
387 387 """
388 388 val = safe_str(val)
389 389 # encode the encrypted value
390 390 if 'encrypted' in self.app_settings_type:
391 391 cipher = EncryptedTextValue()
392 392 val = safe_str(cipher.process_bind_param(val, None))
393 393 self._app_settings_value = val
394 394
395 395 @hybrid_property
396 396 def app_settings_type(self):
397 397 return self._app_settings_type
398 398
399 399 @app_settings_type.setter
400 400 def app_settings_type(self, val):
401 401 if val.split('.')[0] not in self.SETTINGS_TYPES:
402 402 raise Exception('type must be one of %s got %s'
403 403 % (self.SETTINGS_TYPES.keys(), val))
404 404 self._app_settings_type = val
405 405
406 406 @classmethod
407 407 def get_by_prefix(cls, prefix):
408 408 return RhodeCodeSetting.query()\
409 409 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
410 410 .all()
411 411
412 412 def __repr__(self):
413 413 return "<%s('%s:%s[%s]')>" % (
414 414 self.cls_name,
415 415 self.app_settings_name, self.app_settings_value,
416 416 self.app_settings_type
417 417 )
418 418
419 419
420 420 class RhodeCodeUi(Base, BaseModel):
421 421 __tablename__ = 'rhodecode_ui'
422 422 __table_args__ = (
423 423 UniqueConstraint('ui_key'),
424 424 base_table_args
425 425 )
426 426 # Sync those values with vcsserver.config.hooks
427 427
428 428 HOOK_REPO_SIZE = 'changegroup.repo_size'
429 429 # HG
430 430 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
431 431 HOOK_PULL = 'outgoing.pull_logger'
432 432 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
433 433 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
434 434 HOOK_PUSH = 'changegroup.push_logger'
435 435 HOOK_PUSH_KEY = 'pushkey.key_push'
436 436
437 437 HOOKS_BUILTIN = [
438 438 HOOK_PRE_PULL,
439 439 HOOK_PULL,
440 440 HOOK_PRE_PUSH,
441 441 HOOK_PRETX_PUSH,
442 442 HOOK_PUSH,
443 443 HOOK_PUSH_KEY,
444 444 ]
445 445
446 446 # TODO: johbo: Unify the way hooks are configured for git and hg,
447 447 # git part is currently hardcoded.
448 448
449 449 # SVN PATTERNS
450 450 SVN_BRANCH_ID = 'vcs_svn_branch'
451 451 SVN_TAG_ID = 'vcs_svn_tag'
452 452
453 453 ui_id = Column(
454 454 "ui_id", Integer(), nullable=False, unique=True, default=None,
455 455 primary_key=True)
456 456 ui_section = Column(
457 457 "ui_section", String(255), nullable=True, unique=None, default=None)
458 458 ui_key = Column(
459 459 "ui_key", String(255), nullable=True, unique=None, default=None)
460 460 ui_value = Column(
461 461 "ui_value", String(255), nullable=True, unique=None, default=None)
462 462 ui_active = Column(
463 463 "ui_active", Boolean(), nullable=True, unique=None, default=True)
464 464
465 465 def __repr__(self):
466 466 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
467 467 self.ui_key, self.ui_value)
468 468
469 469
470 470 class RepoRhodeCodeSetting(Base, BaseModel):
471 471 __tablename__ = 'repo_rhodecode_settings'
472 472 __table_args__ = (
473 473 UniqueConstraint(
474 474 'app_settings_name', 'repository_id',
475 475 name='uq_repo_rhodecode_setting_name_repo_id'),
476 476 base_table_args
477 477 )
478 478
479 479 repository_id = Column(
480 480 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
481 481 nullable=False)
482 482 app_settings_id = Column(
483 483 "app_settings_id", Integer(), nullable=False, unique=True,
484 484 default=None, primary_key=True)
485 485 app_settings_name = Column(
486 486 "app_settings_name", String(255), nullable=True, unique=None,
487 487 default=None)
488 488 _app_settings_value = Column(
489 489 "app_settings_value", String(4096), nullable=True, unique=None,
490 490 default=None)
491 491 _app_settings_type = Column(
492 492 "app_settings_type", String(255), nullable=True, unique=None,
493 493 default=None)
494 494
495 495 repository = relationship('Repository', viewonly=True)
496 496
497 497 def __init__(self, repository_id, key='', val='', type='unicode'):
498 498 self.repository_id = repository_id
499 499 self.app_settings_name = key
500 500 self.app_settings_type = type
501 501 self.app_settings_value = val
502 502
503 503 @validates('_app_settings_value')
504 504 def validate_settings_value(self, key, val):
505 505 assert type(val) == str
506 506 return val
507 507
508 508 @hybrid_property
509 509 def app_settings_value(self):
510 510 v = self._app_settings_value
511 511 type_ = self.app_settings_type
512 512 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
513 513 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
514 514 return converter(v)
515 515
516 516 @app_settings_value.setter
517 517 def app_settings_value(self, val):
518 518 """
519 519 Setter that will always make sure we use unicode in app_settings_value
520 520
521 521 :param val:
522 522 """
523 523 self._app_settings_value = safe_str(val)
524 524
525 525 @hybrid_property
526 526 def app_settings_type(self):
527 527 return self._app_settings_type
528 528
529 529 @app_settings_type.setter
530 530 def app_settings_type(self, val):
531 531 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
532 532 if val not in SETTINGS_TYPES:
533 533 raise Exception('type must be one of %s got %s'
534 534 % (SETTINGS_TYPES.keys(), val))
535 535 self._app_settings_type = val
536 536
537 537 def __repr__(self):
538 538 return "<%s('%s:%s:%s[%s]')>" % (
539 539 self.cls_name, self.repository.repo_name,
540 540 self.app_settings_name, self.app_settings_value,
541 541 self.app_settings_type
542 542 )
543 543
544 544
545 545 class RepoRhodeCodeUi(Base, BaseModel):
546 546 __tablename__ = 'repo_rhodecode_ui'
547 547 __table_args__ = (
548 548 UniqueConstraint(
549 549 'repository_id', 'ui_section', 'ui_key',
550 550 name='uq_repo_rhodecode_ui_repository_id_section_key'),
551 551 base_table_args
552 552 )
553 553
554 554 repository_id = Column(
555 555 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
556 556 nullable=False)
557 557 ui_id = Column(
558 558 "ui_id", Integer(), nullable=False, unique=True, default=None,
559 559 primary_key=True)
560 560 ui_section = Column(
561 561 "ui_section", String(255), nullable=True, unique=None, default=None)
562 562 ui_key = Column(
563 563 "ui_key", String(255), nullable=True, unique=None, default=None)
564 564 ui_value = Column(
565 565 "ui_value", String(255), nullable=True, unique=None, default=None)
566 566 ui_active = Column(
567 567 "ui_active", Boolean(), nullable=True, unique=None, default=True)
568 568
569 569 repository = relationship('Repository', viewonly=True)
570 570
571 571 def __repr__(self):
572 572 return '<%s[%s:%s]%s=>%s]>' % (
573 573 self.cls_name, self.repository.repo_name,
574 574 self.ui_section, self.ui_key, self.ui_value)
575 575
576 576
577 577 class User(Base, BaseModel):
578 578 __tablename__ = 'users'
579 579 __table_args__ = (
580 580 UniqueConstraint('username'), UniqueConstraint('email'),
581 581 Index('u_username_idx', 'username'),
582 582 Index('u_email_idx', 'email'),
583 583 base_table_args
584 584 )
585 585
586 586 DEFAULT_USER = 'default'
587 587 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
588 588 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
589 589 RECOVERY_CODES_COUNT = 10
590 590
591 591 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
592 592 username = Column("username", String(255), nullable=True, unique=None, default=None)
593 593 password = Column("password", String(255), nullable=True, unique=None, default=None)
594 594 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
595 595 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
596 596 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
597 597 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
598 598 _email = Column("email", String(255), nullable=True, unique=None, default=None)
599 599 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
600 600 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
601 601 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
602 602
603 603 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
604 604 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
605 605 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
606 606 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
607 607 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
608 608 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
609 609
610 610 user_log = relationship('UserLog', back_populates='user')
611 611 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
612 612
613 613 repositories = relationship('Repository', back_populates='user')
614 614 repository_groups = relationship('RepoGroup', back_populates='user')
615 615 user_groups = relationship('UserGroup', back_populates='user')
616 616
617 617 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
618 618 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
619 619
620 620 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
621 621 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
622 622 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
623 623
624 624 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
625 625
626 626 notifications = relationship('UserNotification', cascade='all', back_populates='user')
627 627 # notifications assigned to this user
628 628 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
629 629 # comments created by this user
630 630 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
631 631 # user profile extra info
632 632 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
633 633 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
634 634 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
635 635 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
636 636
637 637 # gists
638 638 user_gists = relationship('Gist', cascade='all', back_populates='owner')
639 639 # user pull requests
640 640 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
641 641
642 642 # external identities
643 643 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
644 644 # review rules
645 645 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
646 646
647 647 # artifacts owned
648 648 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
649 649
650 650 # no cascade, set NULL
651 651 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
652 652
653 653 def __repr__(self):
654 654 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
655 655
656 656 @hybrid_property
657 657 def email(self):
658 658 return self._email
659 659
660 660 @email.setter
661 661 def email(self, val):
662 662 self._email = val.lower() if val else None
663 663
664 664 @hybrid_property
665 665 def first_name(self):
666 666 from rhodecode.lib import helpers as h
667 667 if self.name:
668 668 return h.escape(self.name)
669 669 return self.name
670 670
671 671 @hybrid_property
672 672 def last_name(self):
673 673 from rhodecode.lib import helpers as h
674 674 if self.lastname:
675 675 return h.escape(self.lastname)
676 676 return self.lastname
677 677
678 678 @hybrid_property
679 679 def api_key(self):
680 680 """
681 681 Fetch an auth-token with role ALL connected to this user, if one exists
682 682 """
683 683 user_auth_token = UserApiKeys.query()\
684 684 .filter(UserApiKeys.user_id == self.user_id)\
685 685 .filter(or_(UserApiKeys.expires == -1,
686 686 UserApiKeys.expires >= time.time()))\
687 687 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
688 688 if user_auth_token:
689 689 user_auth_token = user_auth_token.api_key
690 690
691 691 return user_auth_token
692 692
693 693 @api_key.setter
694 694 def api_key(self, val):
695 695 # don't allow to set API key this is deprecated for now
696 696 self._api_key = None
697 697
698 698 @property
699 699 def reviewer_pull_requests(self):
700 700 return PullRequestReviewers.query() \
701 701 .options(joinedload(PullRequestReviewers.pull_request)) \
702 702 .filter(PullRequestReviewers.user_id == self.user_id) \
703 703 .all()
704 704
705 705 @property
706 706 def firstname(self):
707 707 # alias for future
708 708 return self.name
709 709
710 710 @property
711 711 def emails(self):
712 712 other = UserEmailMap.query()\
713 713 .filter(UserEmailMap.user == self) \
714 714 .order_by(UserEmailMap.email_id.asc()) \
715 715 .all()
716 716 return [self.email] + [x.email for x in other]
717 717
718 718 def emails_cached(self):
719 719 emails = []
720 720 if self.user_id != self.get_default_user_id():
721 721 emails = UserEmailMap.query()\
722 722 .filter(UserEmailMap.user == self) \
723 723 .order_by(UserEmailMap.email_id.asc())
724 724
725 725 emails = emails.options(
726 726 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
727 727 )
728 728
729 729 return [self.email] + [x.email for x in emails]
730 730
731 731 @property
732 732 def auth_tokens(self):
733 733 auth_tokens = self.get_auth_tokens()
734 734 return [x.api_key for x in auth_tokens]
735 735
736 736 def get_auth_tokens(self):
737 737 return UserApiKeys.query()\
738 738 .filter(UserApiKeys.user == self)\
739 739 .order_by(UserApiKeys.user_api_key_id.asc())\
740 740 .all()
741 741
742 742 @LazyProperty
743 743 def feed_token(self):
744 744 return self.get_feed_token()
745 745
746 746 def get_feed_token(self, cache=True):
747 747 feed_tokens = UserApiKeys.query()\
748 748 .filter(UserApiKeys.user == self)\
749 749 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
750 750 if cache:
751 751 feed_tokens = feed_tokens.options(
752 752 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
753 753
754 754 feed_tokens = feed_tokens.all()
755 755 if feed_tokens:
756 756 return feed_tokens[0].api_key
757 757 return 'NO_FEED_TOKEN_AVAILABLE'
758 758
759 759 @LazyProperty
760 760 def artifact_token(self):
761 761 return self.get_artifact_token()
762 762
763 763 def get_artifact_token(self, cache=True):
764 764 artifacts_tokens = UserApiKeys.query()\
765 765 .filter(UserApiKeys.user == self) \
766 766 .filter(or_(UserApiKeys.expires == -1,
767 767 UserApiKeys.expires >= time.time())) \
768 768 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
769 769
770 770 if cache:
771 771 artifacts_tokens = artifacts_tokens.options(
772 772 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
773 773
774 774 artifacts_tokens = artifacts_tokens.all()
775 775 if artifacts_tokens:
776 776 return artifacts_tokens[0].api_key
777 777 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
778 778
779 779 def get_or_create_artifact_token(self):
780 780 artifacts_tokens = UserApiKeys.query()\
781 781 .filter(UserApiKeys.user == self) \
782 782 .filter(or_(UserApiKeys.expires == -1,
783 783 UserApiKeys.expires >= time.time())) \
784 784 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
785 785
786 786 artifacts_tokens = artifacts_tokens.all()
787 787 if artifacts_tokens:
788 788 return artifacts_tokens[0].api_key
789 789 else:
790 790 from rhodecode.model.auth_token import AuthTokenModel
791 791 artifact_token = AuthTokenModel().create(
792 792 self, 'auto-generated-artifact-token',
793 793 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
794 794 Session.commit()
795 795 return artifact_token.api_key
796 796
797 797 def is_totp_valid(self, received_code, secret):
798 798 totp = pyotp.TOTP(secret)
799 799 return totp.verify(received_code)
800 800
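Reviewer annotation: a stand-alone illustration of the pyotp calls used above (the secret is generated on the spot, not a real user secret).

import pyotp

secret = pyotp.random_base32()     # same helper used by init_secret_2fa below
totp = pyotp.TOTP(secret)
current_code = totp.now()          # what an authenticator app would display
assert totp.verify(current_code)   # accepted within the current time window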
801 801 def is_2fa_recovery_code_valid(self, received_code, secret):
802 802 encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
803 803 recovery_codes = self.get_2fa_recovery_codes()
804 804 if received_code in recovery_codes:
805 805 encrypted_recovery_codes.pop(recovery_codes.index(received_code))
806 806 self.update_userdata(recovery_codes_2fa=encrypted_recovery_codes)
807 807 return True
808 808 return False
809 809
810 810 @hybrid_property
811 811 def has_forced_2fa(self):
812 812 """
813 813 Checks if 2fa was forced for current user
814 814 """
815 815 from rhodecode.model.settings import SettingsModel
816 816 if value := SettingsModel().get_setting_by_name(f'auth_{self.extern_type}_global_2fa'):
817 817 return value.app_settings_value
818 818 return False
819 819
820 820 @hybrid_property
821 821 def has_enabled_2fa(self):
822 822 """
823 823 Checks if user enabled 2fa
824 824 """
825 825 if value := self.has_forced_2fa:
826 826 return value
827 827 return self.user_data.get('enabled_2fa', False)
828 828
829 829 @has_enabled_2fa.setter
830 830 def has_enabled_2fa(self, val):
831 831 val = str2bool(val)
832 832 self.update_userdata(enabled_2fa=val)
833 833 if not val:
834 834 # NOTE: when setting to false we clear the user_data so no 2fa artifacts are stored
835 835 self.update_userdata(secret_2fa=None, recovery_codes_2fa=[], check_2fa=False)
836 836 Session().commit()
837 837
838 838 @hybrid_property
839 839 def check_2fa_required(self):
840 840 """
841 841 Check if the check_2fa flag is set for this user
842 842 """
843 843 value = self.user_data.get('check_2fa', False)
844 844 return value
845 845
846 846 @check_2fa_required.setter
847 847 def check_2fa_required(self, val):
848 848 val = str2bool(val)
849 849 self.update_userdata(check_2fa=val)
850 850 Session().commit()
851 851
852 852 @hybrid_property
853 853 def has_seen_2fa_codes(self):
854 854 """
855 855 get the flag indicating whether the user has seen the 2fa recovery codes
856 856 """
857 857 value = self.user_data.get('recovery_codes_2fa_seen', False)
858 858 return value
859 859
860 860 @has_seen_2fa_codes.setter
861 861 def has_seen_2fa_codes(self, val):
862 862 val = str2bool(val)
863 863 self.update_userdata(recovery_codes_2fa_seen=val)
864 864 Session().commit()
865 865
866 866 @hybrid_property
867 867 def needs_2fa_configure(self):
868 868 """
869 869 Determines if 2fa setup has been completed for this user, i.e. the user has all data needed for 2fa to work.
870 870
871 871 Currently this means 2fa is enabled and a secret exists
872 872 """
873 873 if self.has_enabled_2fa:
874 874 return not self.user_data.get('secret_2fa')
875 875 return False
876 876
877 877 def init_2fa_recovery_codes(self, persist=True, force=False):
878 878 """
879 879 Creates 2fa recovery codes
880 880 """
881 881 recovery_codes = self.user_data.get('recovery_codes_2fa', [])
882 882 encrypted_codes = []
883 883 if not recovery_codes or force:
884 884 for _ in range(self.RECOVERY_CODES_COUNT):
885 885 recovery_code = pyotp.random_base32()
886 886 recovery_codes.append(recovery_code)
887 887 encrypted_code = enc_utils.encrypt_value(safe_bytes(recovery_code), enc_key=ENCRYPTION_KEY)
888 888 encrypted_codes.append(safe_str(encrypted_code))
889 889 if persist:
890 890 self.update_userdata(recovery_codes_2fa=encrypted_codes, recovery_codes_2fa_seen=False)
891 891 return recovery_codes
892 892 # User should not check the same recovery codes more than once
893 893 return []
894 894
895 895 def get_2fa_recovery_codes(self):
896 896 encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
897 897
898 898 recovery_codes = list(map(
899 899 lambda val: safe_str(
900 900 enc_utils.decrypt_value(
901 901 val,
902 902 enc_key=ENCRYPTION_KEY
903 903 )),
904 904 encrypted_recovery_codes))
905 905 return recovery_codes
906 906
907 907 def init_secret_2fa(self, persist=True, force=False):
908 908 secret_2fa = self.user_data.get('secret_2fa')
909 909 if not secret_2fa or force:
910 910 secret = pyotp.random_base32()
911 911 if persist:
912 912 self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(safe_bytes(secret), enc_key=ENCRYPTION_KEY)))
913 913 return secret
914 914 return ''
915 915
916 916 @hybrid_property
917 917 def secret_2fa(self) -> str:
918 918 """
919 919 get stored secret for 2fa
920 920 """
921 921 secret_2fa = self.user_data.get('secret_2fa')
922 922 if secret_2fa:
923 923 return safe_str(
924 924 enc_utils.decrypt_value(secret_2fa, enc_key=ENCRYPTION_KEY))
925 925 return ''
926 926
927 927 @secret_2fa.setter
928 928 def secret_2fa(self, value: str) -> None:
929 929 encrypted_value = enc_utils.encrypt_value(safe_bytes(value), enc_key=ENCRYPTION_KEY)
930 930 self.update_userdata(secret_2fa=safe_str(encrypted_value))
931 931
932 932 def regenerate_2fa_recovery_codes(self):
933 933 """
934 934 Regenerates 2fa recovery codes upon request
935 935 """
936 936 new_recovery_codes = self.init_2fa_recovery_codes(force=True)
937 937 Session().commit()
938 938 return new_recovery_codes
939 939
940 940 @classmethod
941 941 def extra_valid_auth_tokens(cls, user, role=None):
942 942 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
943 943 .filter(or_(UserApiKeys.expires == -1,
944 944 UserApiKeys.expires >= time.time()))
945 945 if role:
946 946 tokens = tokens.filter(or_(UserApiKeys.role == role,
947 947 UserApiKeys.role == UserApiKeys.ROLE_ALL))
948 948 return tokens.all()
949 949
950 950 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
951 951 from rhodecode.lib import auth
952 952
953 953 log.debug('Trying to authenticate user: %s via auth-token, '
954 954 'and roles: %s', self, roles)
955 955
956 956 if not auth_token:
957 957 return False
958 958
959 959 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
960 960 tokens_q = UserApiKeys.query()\
961 961 .filter(UserApiKeys.user_id == self.user_id)\
962 962 .filter(or_(UserApiKeys.expires == -1,
963 963 UserApiKeys.expires >= time.time()))
964 964
965 965 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
966 966
967 967 crypto_backend = auth.crypto_backend()
968 968 enc_token_map = {}
969 969 plain_token_map = {}
970 970 for token in tokens_q:
971 971 if token.api_key.startswith(crypto_backend.ENC_PREF):
972 972 enc_token_map[token.api_key] = token
973 973 else:
974 974 plain_token_map[token.api_key] = token
975 975 log.debug(
976 976 'Found %s plain and %s encrypted tokens to check for authentication for this user',
977 977 len(plain_token_map), len(enc_token_map))
978 978
979 979 # plain token match comes first
980 980 match = plain_token_map.get(auth_token)
981 981
982 982 # check encrypted tokens now
983 983 if not match:
984 984 for token_hash, token in enc_token_map.items():
985 985 # NOTE(marcink): this is expensive to calculate, but most secure
986 986 if crypto_backend.hash_check(auth_token, token_hash):
987 987 match = token
988 988 break
989 989
990 990 if match:
991 991 log.debug('Found matching token %s', match)
992 992 if match.repo_id:
993 993 log.debug('Found scope, checking for scope match of token %s', match)
994 994 if match.repo_id == scope_repo_id:
995 995 return True
996 996 else:
997 997 log.debug(
998 998 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
999 999 'and calling scope is:%s, skipping further checks',
1000 1000 match.repo, scope_repo_id)
1001 1001 return False
1002 1002 else:
1003 1003 return True
1004 1004
1005 1005 return False
1006 1006
1007 1007 @property
1008 1008 def ip_addresses(self):
1009 1009 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
1010 1010 return [x.ip_addr for x in ret]
1011 1011
1012 1012 @property
1013 1013 def username_and_name(self):
1014 1014 return f'{self.username} ({self.first_name} {self.last_name})'
1015 1015
1016 1016 @property
1017 1017 def username_or_name_or_email(self):
1018 1018 full_name = self.full_name if self.full_name != ' ' else None
1019 1019 return self.username or full_name or self.email
1020 1020
1021 1021 @property
1022 1022 def full_name(self):
1023 1023 return f'{self.first_name} {self.last_name}'
1024 1024
1025 1025 @property
1026 1026 def full_name_or_username(self):
1027 1027 return (f'{self.first_name} {self.last_name}'
1028 1028 if (self.first_name and self.last_name) else self.username)
1029 1029
1030 1030 @property
1031 1031 def full_contact(self):
1032 1032 return f'{self.first_name} {self.last_name} <{self.email}>'
1033 1033
1034 1034 @property
1035 1035 def short_contact(self):
1036 1036 return f'{self.first_name} {self.last_name}'
1037 1037
1038 1038 @property
1039 1039 def is_admin(self):
1040 1040 return self.admin
1041 1041
1042 1042 @property
1043 1043 def language(self):
1044 1044 return self.user_data.get('language')
1045 1045
1046 1046 def AuthUser(self, **kwargs):
1047 1047 """
1048 1048 Returns instance of AuthUser for this user
1049 1049 """
1050 1050 from rhodecode.lib.auth import AuthUser
1051 1051 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
1052 1052
1053 1053 @hybrid_property
1054 1054 def user_data(self):
1055 1055 if not self._user_data:
1056 1056 return {}
1057 1057
1058 1058 try:
1059 1059 return json.loads(self._user_data) or {}
1060 1060 except TypeError:
1061 1061 return {}
1062 1062
1063 1063 @user_data.setter
1064 1064 def user_data(self, val):
1065 1065 if not isinstance(val, dict):
1066 1066 raise Exception(f'user_data must be dict, got {type(val)}')
1067 1067 try:
1068 1068 self._user_data = safe_bytes(json.dumps(val))
1069 1069 except Exception:
1070 1070 log.error(traceback.format_exc())
1071 1071
1072 1072 @classmethod
1073 1073 def get(cls, user_id, cache=False):
1074 1074 if not user_id:
1075 1075 return
1076 1076
1077 1077 user = cls.query()
1078 1078 if cache:
1079 1079 user = user.options(
1080 1080 FromCache("sql_cache_short", f"get_users_{user_id}"))
1081 1081 return user.get(user_id)
1082 1082
1083 1083 @classmethod
1084 1084 def get_by_username(cls, username, case_insensitive=False,
1085 1085 cache=False):
1086 1086
1087 1087 if case_insensitive:
1088 1088 q = cls.select().where(
1089 1089 func.lower(cls.username) == func.lower(username))
1090 1090 else:
1091 1091 q = cls.select().where(cls.username == username)
1092 1092
1093 1093 if cache:
1094 1094 hash_key = _hash_key(username)
1095 1095 q = q.options(
1096 1096 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
1097 1097
1098 1098 return cls.execute(q).scalar_one_or_none()
1099 1099
1100 1100 @classmethod
1101 1101 def get_by_username_or_primary_email(cls, user_identifier):
1102 1102 qs = union_all(cls.select().where(func.lower(cls.username) == func.lower(user_identifier)),
1103 1103 cls.select().where(func.lower(cls.email) == func.lower(user_identifier)))
1104 1104 return cls.execute(cls.select(User).from_statement(qs)).scalar_one_or_none()
1105 1105
1106 1106 @classmethod
1107 1107 def get_by_auth_token(cls, auth_token, cache=False):
1108 1108
1109 1109 q = cls.select(User)\
1110 1110 .join(UserApiKeys)\
1111 1111 .where(UserApiKeys.api_key == auth_token)\
1112 1112 .where(or_(UserApiKeys.expires == -1,
1113 1113 UserApiKeys.expires >= time.time()))
1114 1114
1115 1115 if cache:
1116 1116 q = q.options(
1117 1117 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
1118 1118
1119 1119 matched_user = cls.execute(q).scalar_one_or_none()
1120 1120
1121 1121 return matched_user
1122 1122
1123 1123 @classmethod
1124 1124 def get_by_email(cls, email, case_insensitive=False, cache=False):
1125 1125
1126 1126 if case_insensitive:
1127 1127 q = cls.select().where(func.lower(cls.email) == func.lower(email))
1128 1128 else:
1129 1129 q = cls.select().where(cls.email == email)
1130 1130
1131 1131 if cache:
1132 1132 email_key = _hash_key(email)
1133 1133 q = q.options(
1134 1134 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
1135 1135
1136 1136 ret = cls.execute(q).scalar_one_or_none()
1137 1137
1138 1138 if ret is None:
1139 1139 q = cls.select(UserEmailMap)
1140 1140 # try fetching in alternate email map
1141 1141 if case_insensitive:
1142 1142 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
1143 1143 else:
1144 1144 q = q.where(UserEmailMap.email == email)
1145 1145 q = q.options(joinedload(UserEmailMap.user))
1146 1146 if cache:
1147 1147 q = q.options(
1148 1148 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
1149 1149
1150 1150 result = cls.execute(q).scalar_one_or_none()
1151 1151 ret = getattr(result, 'user', None)
1152 1152
1153 1153 return ret
1154 1154
1155 1155 @classmethod
1156 1156 def get_from_cs_author(cls, author):
1157 1157 """
1158 1158 Tries to get User objects out of commit author string
1159 1159
1160 1160 :param author:
1161 1161 """
1162 1162 from rhodecode.lib.helpers import email, author_name
1163 1163 # if a valid email is in the passed attribute, see if it's in the system
1164 1164 _email = email(author)
1165 1165 if _email:
1166 1166 user = cls.get_by_email(_email, case_insensitive=True)
1167 1167 if user:
1168 1168 return user
1169 1169 # Maybe we can match by username?
1170 1170 _author = author_name(author)
1171 1171 user = cls.get_by_username(_author, case_insensitive=True)
1172 1172 if user:
1173 1173 return user
1174 1174
1175 1175 def update_userdata(self, **kwargs):
1176 1176 usr = self
1177 1177 old = usr.user_data
1178 1178 old.update(**kwargs)
1179 1179 usr.user_data = old
1180 1180 Session().add(usr)
1181 1181 log.debug('updated userdata with %s', kwargs)
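    # Illustrative sketch (not original code): user_data is a free-form JSON
    # dict, so update_userdata() merges arbitrary keys; the key/value below is
    # a hypothetical example.
    #
    #   >>> usr = User.get_by_username('admin')   # assumed username
    #   >>> usr.update_userdata(language='en')
    #   >>> Session().commit()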
1182 1182
1183 1183 def update_lastlogin(self):
1184 1184 """Update user lastlogin"""
1185 1185 self.last_login = datetime.datetime.now()
1186 1186 Session().add(self)
1187 1187 log.debug('updated user %s lastlogin', self.username)
1188 1188
1189 1189 def update_password(self, new_password):
1190 1190 from rhodecode.lib.auth import get_crypt_password
1191 1191
1192 1192 self.password = get_crypt_password(new_password)
1193 1193 Session().add(self)
1194 1194
1195 1195 @classmethod
1196 1196 def get_first_super_admin(cls):
1197 1197 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1198 1198 user = cls.scalars(stmt).first()
1199 1199
1200 1200 if user is None:
1201 1201 raise Exception('FATAL: Missing administrative account!')
1202 1202 return user
1203 1203
1204 1204 @classmethod
1205 1205 def get_all_super_admins(cls, only_active=False):
1206 1206 """
1207 1207 Returns all admin accounts sorted by username
1208 1208 """
1209 1209 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1210 1210 if only_active:
1211 1211 qry = qry.filter(User.active == true())
1212 1212 return qry.all()
1213 1213
1214 1214 @classmethod
1215 1215 def get_all_user_ids(cls, only_active=True):
1216 1216 """
1217 1217 Returns all users IDs
1218 1218 """
1219 1219 qry = Session().query(User.user_id)
1220 1220
1221 1221 if only_active:
1222 1222 qry = qry.filter(User.active == true())
1223 1223 return [x.user_id for x in qry]
1224 1224
1225 1225 @classmethod
1226 1226 def get_default_user(cls, cache=False, refresh=False):
1227 1227 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1228 1228 if user is None:
1229 1229 raise Exception('FATAL: Missing default account!')
1230 1230 if refresh:
1231 1231 # The default user might be based on outdated state which
1232 1232 # has been loaded from the cache.
1233 1233 # A call to refresh() ensures that the
1234 1234 # latest state from the database is used.
1235 1235 Session().refresh(user)
1236 1236
1237 1237 return user
1238 1238
1239 1239 @classmethod
1240 1240 def get_default_user_id(cls):
1241 1241 import rhodecode
1242 1242 return rhodecode.CONFIG['default_user_id']
1243 1243
1244 1244 def _get_default_perms(self, user, suffix=''):
1245 1245 from rhodecode.model.permission import PermissionModel
1246 1246 return PermissionModel().get_default_perms(user.user_perms, suffix)
1247 1247
1248 1248 def get_default_perms(self, suffix=''):
1249 1249 return self._get_default_perms(self, suffix)
1250 1250
1251 1251 def get_api_data(self, include_secrets=False, details='full'):
1252 1252 """
1253 1253 Common function for generating user related data for API
1254 1254
1255 1255 :param include_secrets: By default secrets in the API data will be replaced
1256 1256 by a placeholder value to prevent exposing this data by accident. In case
1257 1257 this data shall be exposed, set this flag to ``True``.
1258 1258
1259 1259 :param details: either 'basic' or 'full'; 'basic' gives only a subset of
1260 1260 the available user information, including user_id, name and emails.
1261 1261 """
1262 1262 user = self
1263 1263 user_data = self.user_data
1264 1264 data = {
1265 1265 'user_id': user.user_id,
1266 1266 'username': user.username,
1267 1267 'firstname': user.name,
1268 1268 'lastname': user.lastname,
1269 1269 'description': user.description,
1270 1270 'email': user.email,
1271 1271 'emails': user.emails,
1272 1272 }
1273 1273 if details == 'basic':
1274 1274 return data
1275 1275
1276 1276 auth_token_length = 40
1277 1277 auth_token_replacement = '*' * auth_token_length
1278 1278
1279 1279 extras = {
1280 1280 'auth_tokens': [auth_token_replacement],
1281 1281 'active': user.active,
1282 1282 'admin': user.admin,
1283 1283 'extern_type': user.extern_type,
1284 1284 'extern_name': user.extern_name,
1285 1285 'last_login': user.last_login,
1286 1286 'last_activity': user.last_activity,
1287 1287 'ip_addresses': user.ip_addresses,
1288 1288 'language': user_data.get('language')
1289 1289 }
1290 1290 data.update(extras)
1291 1291
1292 1292 if include_secrets:
1293 1293 data['auth_tokens'] = user.auth_tokens
1294 1294 return data
1295 1295
1296 1296 def __json__(self):
1297 1297 data = {
1298 1298 'full_name': self.full_name,
1299 1299 'full_name_or_username': self.full_name_or_username,
1300 1300 'short_contact': self.short_contact,
1301 1301 'full_contact': self.full_contact,
1302 1302 }
1303 1303 data.update(self.get_api_data())
1304 1304 return data
1305 1305
1306 1306
1307 1307 class UserApiKeys(Base, BaseModel):
1308 1308 __tablename__ = 'user_api_keys'
1309 1309 __table_args__ = (
1310 1310 Index('uak_api_key_idx', 'api_key'),
1311 1311 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1312 1312 base_table_args
1313 1313 )
1314 1314
1315 1315 # ApiKey role
1316 1316 ROLE_ALL = 'token_role_all'
1317 1317 ROLE_VCS = 'token_role_vcs'
1318 1318 ROLE_API = 'token_role_api'
1319 1319 ROLE_HTTP = 'token_role_http'
1320 1320 ROLE_FEED = 'token_role_feed'
1321 1321 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1322 1322 # ROLE_PASSWORD_RESET below is not included in the ROLES list, as it is
1323 1323 # only used for a single action and cannot be created by users
1324 1324 ROLE_PASSWORD_RESET = 'token_password_reset'
1325 1325
1326 1326 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1327 1327
1328 1328 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1329 1329 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1330 1330 api_key = Column("api_key", String(255), nullable=False, unique=True)
1331 1331 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1332 1332 expires = Column('expires', Float(53), nullable=False)
1333 1333 role = Column('role', String(255), nullable=True)
1334 1334 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1335 1335
1336 1336 # scope columns
1337 1337 repo_id = Column(
1338 1338 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1339 1339 nullable=True, unique=None, default=None)
1340 1340 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1341 1341
1342 1342 repo_group_id = Column(
1343 1343 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1344 1344 nullable=True, unique=None, default=None)
1345 1345 repo_group = relationship('RepoGroup', lazy='joined')
1346 1346
1347 1347 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1348 1348
1349 1349 def __repr__(self):
1350 1350 return f"<{self.cls_name}('{self.role}')>"
1351 1351
1352 1352 def __json__(self):
1353 1353 data = {
1354 1354 'auth_token': self.api_key,
1355 1355 'role': self.role,
1356 1356 'scope': self.scope_humanized,
1357 1357 'expired': self.expired
1358 1358 }
1359 1359 return data
1360 1360
1361 1361 def get_api_data(self, include_secrets=False):
1362 1362 data = self.__json__()
1363 1363 if include_secrets:
1364 1364 return data
1365 1365 else:
1366 1366 data['auth_token'] = self.token_obfuscated
1367 1367 return data
1368 1368
1369 1369 @hybrid_property
1370 1370 def description_safe(self):
1371 1371 from rhodecode.lib import helpers as h
1372 1372 return h.escape(self.description)
1373 1373
1374 1374 @property
1375 1375 def expired(self):
1376 1376 if self.expires == -1:
1377 1377 return False
1378 1378 return time.time() > self.expires
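    # Editor's note (sketch, semantics inferred from the property above):
    # `expires` holds a unix timestamp, with -1 meaning "never expires".
    #
    #   >>> token.expires = -1                     # -> token.expired == False
    #   >>> token.expires = time.time() - 3600     # in the past -> token.expired == True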
1379 1379
1380 1380 @classmethod
1381 1381 def _get_role_name(cls, role):
1382 1382 return {
1383 1383 cls.ROLE_ALL: _('all'),
1384 1384 cls.ROLE_HTTP: _('http/web interface'),
1385 1385 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1386 1386 cls.ROLE_API: _('api calls'),
1387 1387 cls.ROLE_FEED: _('feed access'),
1388 1388 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1389 1389 }.get(role, role)
1390 1390
1391 1391 @classmethod
1392 1392 def _get_role_description(cls, role):
1393 1393 return {
1394 1394 cls.ROLE_ALL: _('Token for all actions.'),
1395 1395 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1396 1396 'logging in, using the `api_access_controllers_whitelist` functionality.'),
1397 1397 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1398 1398 'Requires auth_token authentication plugin to be active. <br/>'
1399 1399 'Such a token should then be used instead of a password to '
1400 1400 'interact with a repository, and can additionally be '
1401 1401 'limited to a single repository using the repo scope.'),
1402 1402 cls.ROLE_API: _('Token limited to api calls.'),
1403 1403 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1404 1404 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1405 1405 }.get(role, role)
1406 1406
1407 1407 @property
1408 1408 def role_humanized(self):
1409 1409 return self._get_role_name(self.role)
1410 1410
1411 1411 def _get_scope(self):
1412 1412 if self.repo:
1413 1413 return 'Repository: {}'.format(self.repo.repo_name)
1414 1414 if self.repo_group:
1415 1415 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1416 1416 return 'Global'
1417 1417
1418 1418 @property
1419 1419 def scope_humanized(self):
1420 1420 return self._get_scope()
1421 1421
1422 1422 @property
1423 1423 def token_obfuscated(self):
1424 1424 if self.api_key:
1425 1425 return self.api_key[:4] + "****"
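    # Illustrative example (token value assumed): only the first 4 characters
    # of the auth token are kept when it is displayed:
    #
    #   >>> token.api_key = '25b8e9c0aa1f...'
    #   >>> token.token_obfuscated
    #   '25b8****'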
1426 1426
1427 1427
1428 1428 class UserEmailMap(Base, BaseModel):
1429 1429 __tablename__ = 'user_email_map'
1430 1430 __table_args__ = (
1431 1431 Index('uem_email_idx', 'email'),
1432 1432 Index('uem_user_id_idx', 'user_id'),
1433 1433 UniqueConstraint('email'),
1434 1434 base_table_args
1435 1435 )
1436 1436
1437 1437 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1438 1438 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1439 1439 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1440 1440 user = relationship('User', lazy='joined', back_populates='user_emails')
1441 1441
1442 1442 @validates('_email')
1443 1443 def validate_email(self, key, email):
1444 1444 # check that this email is not already used as a main (primary) email
1445 1445 main_email = Session().query(User).filter(User.email == email).scalar()
1446 1446 if main_email is not None:
1447 1447 raise AttributeError('email %s is already present in the user table' % email)
1448 1448 return email
1449 1449
1450 1450 @hybrid_property
1451 1451 def email(self):
1452 1452 return self._email
1453 1453
1454 1454 @email.setter
1455 1455 def email(self, val):
1456 1456 self._email = val.lower() if val else None
1457 1457
1458 1458
1459 1459 class UserIpMap(Base, BaseModel):
1460 1460 __tablename__ = 'user_ip_map'
1461 1461 __table_args__ = (
1462 1462 UniqueConstraint('user_id', 'ip_addr'),
1463 1463 base_table_args
1464 1464 )
1465 1465
1466 1466 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1467 1467 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1468 1468 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1469 1469 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1470 1470 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1471 1471 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1472 1472
1473 1473 @hybrid_property
1474 1474 def description_safe(self):
1475 1475 from rhodecode.lib import helpers as h
1476 1476 return h.escape(self.description)
1477 1477
1478 1478 @classmethod
1479 1479 def _get_ip_range(cls, ip_addr):
1480 1480 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1481 1481 return [str(net.network_address), str(net.broadcast_address)]
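    # Illustrative example (not original code) of the CIDR expansion performed
    # by _get_ip_range above, using the stdlib ipaddress module:
    #
    #   >>> UserIpMap._get_ip_range('192.168.1.0/24')
    #   ['192.168.1.0', '192.168.1.255']
    #   >>> UserIpMap._get_ip_range('10.0.0.1')    # a single address maps to itself
    #   ['10.0.0.1', '10.0.0.1']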
1482 1482
1483 1483 def __json__(self):
1484 1484 return {
1485 1485 'ip_addr': self.ip_addr,
1486 1486 'ip_range': self._get_ip_range(self.ip_addr),
1487 1487 }
1488 1488
1489 1489 def __repr__(self):
1490 1490 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1491 1491
1492 1492
1493 1493 class UserSshKeys(Base, BaseModel):
1494 1494 __tablename__ = 'user_ssh_keys'
1495 1495 __table_args__ = (
1496 1496 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1497 1497
1498 1498 UniqueConstraint('ssh_key_fingerprint'),
1499 1499
1500 1500 base_table_args
1501 1501 )
1502 1502
1503 1503 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1504 1504 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1505 1505 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1506 1506
1507 1507 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1508 1508
1509 1509 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1510 1510 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1511 1511 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1512 1512
1513 1513 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1514 1514
1515 1515 def __json__(self):
1516 1516 data = {
1517 1517 'ssh_fingerprint': self.ssh_key_fingerprint,
1518 1518 'description': self.description,
1519 1519 'created_on': self.created_on
1520 1520 }
1521 1521 return data
1522 1522
1523 1523 def get_api_data(self):
1524 1524 data = self.__json__()
1525 1525 return data
1526 1526
1527 1527
1528 1528 class UserLog(Base, BaseModel):
1529 1529 __tablename__ = 'user_logs'
1530 1530 __table_args__ = (
1531 1531 base_table_args,
1532 1532 )
1533 1533
1534 1534 VERSION_1 = 'v1'
1535 1535 VERSION_2 = 'v2'
1536 1536 VERSIONS = [VERSION_1, VERSION_2]
1537 1537
1538 1538 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1539 1539 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1540 1540 username = Column("username", String(255), nullable=True, unique=None, default=None)
1541 1541 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1542 1542 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1543 1543 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1544 1544 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1545 1545 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1546 1546
1547 1547 version = Column("version", String(255), nullable=True, default=VERSION_1)
1548 1548 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1549 1549 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1550 1550 user = relationship('User', cascade='', back_populates='user_log')
1551 1551 repository = relationship('Repository', cascade='', back_populates='logs')
1552 1552
1553 1553 def __repr__(self):
1554 1554 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1555 1555
1556 1556 def __json__(self):
1557 1557 return {
1558 1558 'user_id': self.user_id,
1559 1559 'username': self.username,
1560 1560 'repository_id': self.repository_id,
1561 1561 'repository_name': self.repository_name,
1562 1562 'user_ip': self.user_ip,
1563 1563 'action_date': self.action_date,
1564 1564 'action': self.action,
1565 1565 }
1566 1566
1567 1567 @hybrid_property
1568 1568 def entry_id(self):
1569 1569 return self.user_log_id
1570 1570
1571 1571 @property
1572 1572 def action_as_day(self):
1573 1573 return datetime.date(*self.action_date.timetuple()[:3])
1574 1574
1575 1575
1576 1576 class UserGroup(Base, BaseModel):
1577 1577 __tablename__ = 'users_groups'
1578 1578 __table_args__ = (
1579 1579 base_table_args,
1580 1580 )
1581 1581
1582 1582 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1583 1583 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1584 1584 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1585 1585 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1586 1586 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1587 1587 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1588 1588 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1589 1589 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1590 1590
1591 1591 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1592 1592 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1593 1593 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1594 1594 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1595 1595 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1596 1596
1597 1597 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1598 1598
1599 1599 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1600 1600 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1601 1601
1602 1602 @classmethod
1603 1603 def _load_group_data(cls, column):
1604 1604 if not column:
1605 1605 return {}
1606 1606
1607 1607 try:
1608 1608 return json.loads(column) or {}
1609 1609 except TypeError:
1610 1610 return {}
1611 1611
1612 1612 @hybrid_property
1613 1613 def description_safe(self):
1614 1614 from rhodecode.lib import helpers as h
1615 1615 return h.escape(self.user_group_description)
1616 1616
1617 1617 @hybrid_property
1618 1618 def group_data(self):
1619 1619 return self._load_group_data(self._group_data)
1620 1620
1621 1621 @group_data.expression
1622 1622 def group_data(self, **kwargs):
1623 1623 return self._group_data
1624 1624
1625 1625 @group_data.setter
1626 1626 def group_data(self, val):
1627 1627 try:
1628 1628 self._group_data = json.dumps(val)
1629 1629 except Exception:
1630 1630 log.error(traceback.format_exc())
1631 1631
1632 1632 @classmethod
1633 1633 def _load_sync(cls, group_data):
1634 1634 if group_data:
1635 1635 return group_data.get('extern_type')
1636 1636
1637 1637 @property
1638 1638 def sync(self):
1639 1639 return self._load_sync(self.group_data)
1640 1640
1641 1641 def __repr__(self):
1642 1642 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1643 1643
1644 1644 @classmethod
1645 1645 def get_by_group_name(cls, group_name, cache=False,
1646 1646 case_insensitive=False):
1647 1647 if case_insensitive:
1648 1648 q = cls.query().filter(func.lower(cls.users_group_name) ==
1649 1649 func.lower(group_name))
1650 1650
1651 1651 else:
1652 1652 q = cls.query().filter(cls.users_group_name == group_name)
1653 1653 if cache:
1654 1654 name_key = _hash_key(group_name)
1655 1655 q = q.options(
1656 1656 FromCache("sql_cache_short", f"get_group_{name_key}"))
1657 1657 return q.scalar()
1658 1658
1659 1659 @classmethod
1660 1660 def get(cls, user_group_id, cache=False):
1661 1661 if not user_group_id:
1662 1662 return
1663 1663
1664 1664 user_group = cls.query()
1665 1665 if cache:
1666 1666 user_group = user_group.options(
1667 1667 FromCache("sql_cache_short", f"get_users_group_{user_group_id}"))
1668 1668 return user_group.get(user_group_id)
1669 1669
1670 1670 def permissions(self, with_admins=True, with_owner=True,
1671 1671 expand_from_user_groups=False):
1672 1672 """
1673 1673 Permissions for user groups
1674 1674 """
1675 1675 _admin_perm = 'usergroup.admin'
1676 1676
1677 1677 owner_row = []
1678 1678 if with_owner:
1679 1679 usr = AttributeDict(self.user.get_dict())
1680 1680 usr.owner_row = True
1681 1681 usr.permission = _admin_perm
1682 1682 owner_row.append(usr)
1683 1683
1684 1684 super_admin_ids = []
1685 1685 super_admin_rows = []
1686 1686 if with_admins:
1687 1687 for usr in User.get_all_super_admins():
1688 1688 super_admin_ids.append(usr.user_id)
1689 1689 # if this admin is also owner, don't double the record
1690 1690 if usr.user_id == owner_row[0].user_id:
1691 1691 owner_row[0].admin_row = True
1692 1692 else:
1693 1693 usr = AttributeDict(usr.get_dict())
1694 1694 usr.admin_row = True
1695 1695 usr.permission = _admin_perm
1696 1696 super_admin_rows.append(usr)
1697 1697
1698 1698 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1699 1699 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1700 1700 joinedload(UserUserGroupToPerm.user),
1701 1701 joinedload(UserUserGroupToPerm.permission),)
1702 1702
1703 1703 # get owners, admins and their permissions. We re-write the sqlalchemy
1704 1704 # objects into named-tuples because the sqlalchemy session holds a global
1705 1705 # reference, and changing one object would propagate to all others. This
1706 1706 # means that if an admin is also an owner, an admin_row change would
1707 1707 # otherwise propagate to both objects
1708 1708 perm_rows = []
1709 1709 for _usr in q.all():
1710 1710 usr = AttributeDict(_usr.user.get_dict())
1711 1711 # if this user is also owner/admin, mark as duplicate record
1712 1712 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1713 1713 usr.duplicate_perm = True
1714 1714 usr.permission = _usr.permission.permission_name
1715 1715 perm_rows.append(usr)
1716 1716
1717 1717 # filter the perm rows by 'default' first and then sort them by
1718 1718 # admin,write,read,none permissions sorted again alphabetically in
1719 1719 # each group
1720 1720 perm_rows = sorted(perm_rows, key=display_user_sort)
1721 1721
1722 1722 user_groups_rows = []
1723 1723 if expand_from_user_groups:
1724 1724 for ug in self.permission_user_groups(with_members=True):
1725 1725 for user_data in ug.members:
1726 1726 user_groups_rows.append(user_data)
1727 1727
1728 1728 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1729 1729
1730 1730 def permission_user_groups(self, with_members=False):
1731 1731 q = UserGroupUserGroupToPerm.query()\
1732 1732 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1733 1733 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1734 1734 joinedload(UserGroupUserGroupToPerm.target_user_group),
1735 1735 joinedload(UserGroupUserGroupToPerm.permission),)
1736 1736
1737 1737 perm_rows = []
1738 1738 for _user_group in q.all():
1739 1739 entry = AttributeDict(_user_group.user_group.get_dict())
1740 1740 entry.permission = _user_group.permission.permission_name
1741 1741 if with_members:
1742 1742 entry.members = [x.user.get_dict()
1743 1743 for x in _user_group.user_group.members]
1744 1744 perm_rows.append(entry)
1745 1745
1746 1746 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1747 1747 return perm_rows
1748 1748
1749 1749 def _get_default_perms(self, user_group, suffix=''):
1750 1750 from rhodecode.model.permission import PermissionModel
1751 1751 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1752 1752
1753 1753 def get_default_perms(self, suffix=''):
1754 1754 return self._get_default_perms(self, suffix)
1755 1755
1756 1756 def get_api_data(self, with_group_members=True, include_secrets=False):
1757 1757 """
1758 1758 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1759 1759 basically forwarded.
1760 1760
1761 1761 """
1762 1762 user_group = self
1763 1763 data = {
1764 1764 'users_group_id': user_group.users_group_id,
1765 1765 'group_name': user_group.users_group_name,
1766 1766 'group_description': user_group.user_group_description,
1767 1767 'active': user_group.users_group_active,
1768 1768 'owner': user_group.user.username,
1769 1769 'sync': user_group.sync,
1770 1770 'owner_email': user_group.user.email,
1771 1771 }
1772 1772
1773 1773 if with_group_members:
1774 1774 users = []
1775 1775 for user in user_group.members:
1776 1776 user = user.user
1777 1777 users.append(user.get_api_data(include_secrets=include_secrets))
1778 1778 data['users'] = users
1779 1779
1780 1780 return data
1781 1781
1782 1782
1783 1783 class UserGroupMember(Base, BaseModel):
1784 1784 __tablename__ = 'users_groups_members'
1785 1785 __table_args__ = (
1786 1786 base_table_args,
1787 1787 )
1788 1788
1789 1789 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1790 1790 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1791 1791 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1792 1792
1793 1793 user = relationship('User', lazy='joined', back_populates='group_member')
1794 1794 users_group = relationship('UserGroup', back_populates='members')
1795 1795
1796 1796 def __init__(self, gr_id='', u_id=''):
1797 1797 self.users_group_id = gr_id
1798 1798 self.user_id = u_id
1799 1799
1800 1800
1801 1801 class RepositoryField(Base, BaseModel):
1802 1802 __tablename__ = 'repositories_fields'
1803 1803 __table_args__ = (
1804 1804 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1805 1805 base_table_args,
1806 1806 )
1807 1807
1808 1808 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1809 1809
1810 1810 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1811 1811 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1812 1812 field_key = Column("field_key", String(250))
1813 1813 field_label = Column("field_label", String(1024), nullable=False)
1814 1814 field_value = Column("field_value", String(10000), nullable=False)
1815 1815 field_desc = Column("field_desc", String(1024), nullable=False)
1816 1816 field_type = Column("field_type", String(255), nullable=False, unique=None)
1817 1817 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1818 1818
1819 1819 repository = relationship('Repository', back_populates='extra_fields')
1820 1820
1821 1821 @property
1822 1822 def field_key_prefixed(self):
1823 1823 return 'ex_%s' % self.field_key
1824 1824
1825 1825 @classmethod
1826 1826 def un_prefix_key(cls, key):
1827 1827 if key.startswith(cls.PREFIX):
1828 1828 return key[len(cls.PREFIX):]
1829 1829 return key
1830 1830
1831 1831 @classmethod
1832 1832 def get_by_key_name(cls, key, repo):
1833 1833 row = cls.query()\
1834 1834 .filter(cls.repository == repo)\
1835 1835 .filter(cls.field_key == key).scalar()
1836 1836 return row
1837 1837
1838 1838
1839 1839 class Repository(Base, BaseModel):
1840 1840 __tablename__ = 'repositories'
1841 1841 __table_args__ = (
1842 1842 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1843 1843 base_table_args,
1844 1844 )
1845 1845 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1846 1846 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1847 1847 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1848 1848
1849 1849 STATE_CREATED = 'repo_state_created'
1850 1850 STATE_PENDING = 'repo_state_pending'
1851 1851 STATE_ERROR = 'repo_state_error'
1852 1852
1853 1853 LOCK_AUTOMATIC = 'lock_auto'
1854 1854 LOCK_API = 'lock_api'
1855 1855 LOCK_WEB = 'lock_web'
1856 1856 LOCK_PULL = 'lock_pull'
1857 1857
1858 1858 NAME_SEP = URL_SEP
1859 1859
1860 1860 repo_id = Column(
1861 1861 "repo_id", Integer(), nullable=False, unique=True, default=None,
1862 1862 primary_key=True)
1863 1863 _repo_name = Column(
1864 1864 "repo_name", Text(), nullable=False, default=None)
1865 1865 repo_name_hash = Column(
1866 1866 "repo_name_hash", String(255), nullable=False, unique=True)
1867 1867 repo_state = Column("repo_state", String(255), nullable=True)
1868 1868
1869 1869 clone_uri = Column(
1870 1870 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1871 1871 default=None)
1872 1872 push_uri = Column(
1873 1873 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1874 1874 default=None)
1875 1875 repo_type = Column(
1876 1876 "repo_type", String(255), nullable=False, unique=False, default=None)
1877 1877 user_id = Column(
1878 1878 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1879 1879 unique=False, default=None)
1880 1880 private = Column(
1881 1881 "private", Boolean(), nullable=True, unique=None, default=None)
1882 1882 archived = Column(
1883 1883 "archived", Boolean(), nullable=True, unique=None, default=None)
1884 1884 enable_statistics = Column(
1885 1885 "statistics", Boolean(), nullable=True, unique=None, default=True)
1886 1886 enable_downloads = Column(
1887 1887 "downloads", Boolean(), nullable=True, unique=None, default=True)
1888 1888 description = Column(
1889 1889 "description", String(10000), nullable=True, unique=None, default=None)
1890 1890 created_on = Column(
1891 1891 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1892 1892 default=datetime.datetime.now)
1893 1893 updated_on = Column(
1894 1894 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1895 1895 default=datetime.datetime.now)
1896 1896 _landing_revision = Column(
1897 1897 "landing_revision", String(255), nullable=False, unique=False,
1898 1898 default=None)
1899 1899 enable_locking = Column(
1900 1900 "enable_locking", Boolean(), nullable=False, unique=None,
1901 1901 default=False)
1902 1902 _locked = Column(
1903 1903 "locked", String(255), nullable=True, unique=False, default=None)
1904 1904 _changeset_cache = Column(
1905 1905 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1906 1906
1907 1907 fork_id = Column(
1908 1908 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1909 1909 nullable=True, unique=False, default=None)
1910 1910 group_id = Column(
1911 1911 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1912 1912 unique=False, default=None)
1913 1913
1914 1914 user = relationship('User', lazy='joined', back_populates='repositories')
1915 1915 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1916 1916 group = relationship('RepoGroup', lazy='joined')
1917 1917 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1918 1918 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1919 1919 stats = relationship('Statistics', cascade='all', uselist=False)
1920 1920
1921 1921 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1922 1922 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1923 1923
1924 1924 logs = relationship('UserLog', back_populates='repository')
1925 1925
1926 1926 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1927 1927
1928 1928 pull_requests_source = relationship(
1929 1929 'PullRequest',
1930 1930 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1931 1931 cascade="all, delete-orphan",
1932 1932 overlaps="source_repo"
1933 1933 )
1934 1934 pull_requests_target = relationship(
1935 1935 'PullRequest',
1936 1936 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1937 1937 cascade="all, delete-orphan",
1938 1938 overlaps="target_repo"
1939 1939 )
1940 1940
1941 1941 ui = relationship('RepoRhodeCodeUi', cascade="all")
1942 1942 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1943 1943 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1944 1944
1945 1945 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1946 1946
1947 1947 # no cascade, set NULL
1948 1948 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1949 1949
1950 1950 review_rules = relationship('RepoReviewRule')
1951 1951 user_branch_perms = relationship('UserToRepoBranchPermission')
1952 1952 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1953 1953
1954 1954 def __repr__(self):
1955 1955 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1956 1956
1957 1957 @hybrid_property
1958 1958 def description_safe(self):
1959 1959 from rhodecode.lib import helpers as h
1960 1960 return h.escape(self.description)
1961 1961
1962 1962 @hybrid_property
1963 1963 def landing_rev(self):
1964 1964 # should always return [rev_type, rev], e.g. ['branch', 'master']
1965 1965 if self._landing_revision:
1966 1966 _rev_info = self._landing_revision.split(':')
1967 1967 if len(_rev_info) < 2:
1968 1968 _rev_info.insert(0, 'rev')
1969 1969 return [_rev_info[0], _rev_info[1]]
1970 1970 return [None, None]
1971 1971
1972 1972 @property
1973 1973 def landing_ref_type(self):
1974 1974 return self.landing_rev[0]
1975 1975
1976 1976 @property
1977 1977 def landing_ref_name(self):
1978 1978 return self.landing_rev[1]
1979 1979
1980 1980 @landing_rev.setter
1981 1981 def landing_rev(self, val):
1982 1982 if ':' not in val:
1983 1983 raise ValueError('value must be delimited with `:` and consist '
1984 1984 'of <rev_type>:<rev>, got %s instead' % val)
1985 1985 self._landing_revision = val
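    # Illustrative sketch (values assumed): the landing revision is stored as a
    # '<rev_type>:<rev>' string and read back as a two-element list:
    #
    #   >>> repo.landing_rev = 'branch:master'
    #   >>> repo.landing_rev
    #   ['branch', 'master']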
1986 1986
1987 1987 @hybrid_property
1988 1988 def locked(self):
1989 1989 if self._locked:
1990 1990 user_id, timelocked, reason = self._locked.split(':')
1991 1991 lock_values = int(user_id), timelocked, reason
1992 1992 else:
1993 1993 lock_values = [None, None, None]
1994 1994 return lock_values
1995 1995
1996 1996 @locked.setter
1997 1997 def locked(self, val):
1998 1998 if val and isinstance(val, (list, tuple)):
1999 1999 self._locked = ':'.join(map(str, val))
2000 2000 else:
2001 2001 self._locked = None
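    # Illustrative sketch of the lock storage handled by the property above;
    # the user id, timestamp and reason values are assumptions:
    #
    #   >>> repo.locked = [2, time.time(), Repository.LOCK_WEB]
    #   >>> repo.locked                 # -> (2, '<unix timestamp string>', 'lock_web')
    #   >>> repo.locked = None          # clears the lock -> [None, None, None]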
2002 2002
2003 2003 @classmethod
2004 2004 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2005 2005 from rhodecode.lib.vcs.backends.base import EmptyCommit
2006 2006 dummy = EmptyCommit().__json__()
2007 2007 if not changeset_cache_raw:
2008 2008 dummy['source_repo_id'] = repo_id
2009 2009 return json.loads(json.dumps(dummy))
2010 2010
2011 2011 try:
2012 2012 return json.loads(changeset_cache_raw)
2013 2013 except TypeError:
2014 2014 return dummy
2015 2015 except Exception:
2016 2016 log.error(traceback.format_exc())
2017 2017 return dummy
2018 2018
2019 2019 @hybrid_property
2020 2020 def changeset_cache(self):
2021 2021 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
2022 2022
2023 2023 @changeset_cache.setter
2024 2024 def changeset_cache(self, val):
2025 2025 try:
2026 2026 self._changeset_cache = json.dumps(val)
2027 2027 except Exception:
2028 2028 log.error(traceback.format_exc())
2029 2029
2030 2030 @hybrid_property
2031 2031 def repo_name(self):
2032 2032 return self._repo_name
2033 2033
2034 2034 @repo_name.setter
2035 2035 def repo_name(self, value):
2036 2036 self._repo_name = value
2037 2037 self.repo_name_hash = sha1(safe_bytes(value))
2038 2038
2039 2039 @classmethod
2040 2040 def normalize_repo_name(cls, repo_name):
2041 2041 Normalizes an OS-specific repo_name to the format stored internally in the
2042 2042 database, using URL_SEP
2043 2043 database using URL_SEP
2044 2044
2045 2045 :param cls:
2046 2046 :param repo_name:
2047 2047 """
2048 2048 return cls.NAME_SEP.join(repo_name.split(os.sep))
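    # Illustrative example (assumes a Windows-style os.sep of '\\'): the
    # OS-specific separator is normalized to URL_SEP ('/') before storage:
    #
    #   >>> Repository.normalize_repo_name('group\\subgroup\\repo')
    #   'group/subgroup/repo'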
2049 2049
2050 2050 @classmethod
2051 2051 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
2052 2052 session = Session()
2053 2053 q = session.query(cls).filter(cls.repo_name == repo_name)
2054 2054
2055 2055 if cache:
2056 2056 if identity_cache:
2057 2057 val = cls.identity_cache(session, 'repo_name', repo_name)
2058 2058 if val:
2059 2059 return val
2060 2060 else:
2061 2061 cache_key = f"get_repo_by_name_{_hash_key(repo_name)}"
2062 2062 q = q.options(
2063 2063 FromCache("sql_cache_short", cache_key))
2064 2064
2065 2065 return q.scalar()
2066 2066
2067 2067 @classmethod
2068 2068 def get_by_id_or_repo_name(cls, repoid):
2069 2069 if isinstance(repoid, int):
2070 2070 try:
2071 2071 repo = cls.get(repoid)
2072 2072 except ValueError:
2073 2073 repo = None
2074 2074 else:
2075 2075 repo = cls.get_by_repo_name(repoid)
2076 2076 return repo
2077 2077
2078 2078 @classmethod
2079 2079 def get_by_full_path(cls, repo_full_path):
2080 2080 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
2081 2081 repo_name = cls.normalize_repo_name(repo_name)
2082 2082 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
2083 2083
2084 2084 @classmethod
2085 2085 def get_repo_forks(cls, repo_id):
2086 2086 return cls.query().filter(Repository.fork_id == repo_id)
2087 2087
2088 2088 @classmethod
2089 2089 def base_path(cls):
2090 2090 """
2091 2091 Returns the base path where all repos are stored
2092 2092
2093 2093 :param cls:
2094 2094 """
2095 2095 from rhodecode.lib.utils import get_rhodecode_repo_store_path
2096 2096 return get_rhodecode_repo_store_path()
2097 2097
2098 2098 @classmethod
2099 2099 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
2100 2100 case_insensitive=True, archived=False):
2101 2101 q = Repository.query()
2102 2102
2103 2103 if not archived:
2104 2104 q = q.filter(Repository.archived.isnot(true()))
2105 2105
2106 2106 if not isinstance(user_id, Optional):
2107 2107 q = q.filter(Repository.user_id == user_id)
2108 2108
2109 2109 if not isinstance(group_id, Optional):
2110 2110 q = q.filter(Repository.group_id == group_id)
2111 2111
2112 2112 if case_insensitive:
2113 2113 q = q.order_by(func.lower(Repository.repo_name))
2114 2114 else:
2115 2115 q = q.order_by(Repository.repo_name)
2116 2116
2117 2117 return q.all()
2118 2118
2119 2119 @property
2120 2120 def repo_uid(self):
2121 2121 return '_{}'.format(self.repo_id)
2122 2122
2123 2123 @property
2124 2124 def forks(self):
2125 2125 """
2126 2126 Return forks of this repo
2127 2127 """
2128 2128 return Repository.get_repo_forks(self.repo_id)
2129 2129
2130 2130 @property
2131 2131 def parent(self):
2132 2132 """
2133 2133 Returns fork parent
2134 2134 """
2135 2135 return self.fork
2136 2136
2137 2137 @property
2138 2138 def just_name(self):
2139 2139 return self.repo_name.split(self.NAME_SEP)[-1]
2140 2140
2141 2141 @property
2142 2142 def groups_with_parents(self):
2143 2143 groups = []
2144 2144 if self.group is None:
2145 2145 return groups
2146 2146
2147 2147 cur_gr = self.group
2148 2148 groups.insert(0, cur_gr)
2149 2149 while 1:
2150 2150 gr = getattr(cur_gr, 'parent_group', None)
2151 2151 cur_gr = cur_gr.parent_group
2152 2152 if gr is None:
2153 2153 break
2154 2154 groups.insert(0, gr)
2155 2155
2156 2156 return groups
2157 2157
2158 2158 @property
2159 2159 def groups_and_repo(self):
2160 2160 return self.groups_with_parents, self
2161 2161
2162 2162 @property
2163 2163 def repo_path(self):
2164 2164 """
2165 2165 Returns the full base path for this repository, i.e. where it actually
2166 2166 exists on the filesystem
2167 2167 """
2168 2168 return self.base_path()
2169 2169
2170 2170 @property
2171 2171 def repo_full_path(self):
2172 2172 p = [self.repo_path]
2173 2173 # we need to split the name by / since this is how we store the
2174 2174 # names in the database, but that eventually needs to be converted
2175 2175 # into a valid system path
2176 2176 p += self.repo_name.split(self.NAME_SEP)
2177 2177 return os.path.join(*map(safe_str, p))
2178 2178
2179 2179 @property
2180 2180 def cache_keys(self):
2181 2181 """
2182 2182 Returns associated cache keys for that repo
2183 2183 """
2184 2184 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2185 2185 return CacheKey.query()\
2186 2186 .filter(CacheKey.cache_key == repo_namespace_key)\
2187 2187 .order_by(CacheKey.cache_key)\
2188 2188 .all()
2189 2189
2190 2190 @property
2191 2191 def cached_diffs_relative_dir(self):
2192 2192 """
2193 2193 Return the cached diffs path relative to the repository store, used for
2194 2194 safe display to users who shouldn't know the absolute store
2195 2195 path
2196 2196 """
2197 2197 return os.path.join(
2198 2198 os.path.dirname(self.repo_name),
2199 2199 self.cached_diffs_dir.split(os.path.sep)[-1])
2200 2200
2201 2201 @property
2202 2202 def cached_diffs_dir(self):
2203 2203 path = self.repo_full_path
2204 2204 return os.path.join(
2205 2205 os.path.dirname(path),
2206 2206 f'.__shadow_diff_cache_repo_{self.repo_id}')
2207 2207
2208 2208 def cached_diffs(self):
2209 2209 diff_cache_dir = self.cached_diffs_dir
2210 2210 if os.path.isdir(diff_cache_dir):
2211 2211 return os.listdir(diff_cache_dir)
2212 2212 return []
2213 2213
2214 2214 def shadow_repos(self):
2215 2215 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2216 2216 return [
2217 2217 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2218 2218 if x.startswith(shadow_repos_pattern)
2219 2219 ]
2220 2220
2221 2221 def get_new_name(self, repo_name):
2222 2222 """
2223 2223 returns the new full repository name based on the assigned group and the new name
2224 2224
2225 2225 :param repo_name:
2226 2226 """
2227 2227 path_prefix = self.group.full_path_splitted if self.group else []
2228 2228 return self.NAME_SEP.join(path_prefix + [repo_name])
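    # Illustrative sketch (repository and group names are assumptions): the new
    # name is prefixed with the full path of the assigned group, if any:
    #
    #   >>> repo.get_new_name('renamed-repo')
    #   'parent-group/renamed-repo'     # repo assigned to group 'parent-group'
    #   'renamed-repo'                  # repo with no group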
2229 2229
2230 2230 @property
2231 2231 def _config(self):
2232 2232 """
2233 2233 Returns db based config object.
2234 2234 """
2235 2235 from rhodecode.lib.utils import make_db_config
2236 2236 return make_db_config(clear_session=False, repo=self)
2237 2237
2238 2238 def permissions(self, with_admins=True, with_owner=True,
2239 2239 expand_from_user_groups=False):
2240 2240 """
2241 2241 Permissions for repositories
2242 2242 """
2243 2243 _admin_perm = 'repository.admin'
2244 2244
2245 2245 owner_row = []
2246 2246 if with_owner:
2247 2247 usr = AttributeDict(self.user.get_dict())
2248 2248 usr.owner_row = True
2249 2249 usr.permission = _admin_perm
2250 2250 usr.permission_id = None
2251 2251 owner_row.append(usr)
2252 2252
2253 2253 super_admin_ids = []
2254 2254 super_admin_rows = []
2255 2255 if with_admins:
2256 2256 for usr in User.get_all_super_admins():
2257 2257 super_admin_ids.append(usr.user_id)
2258 2258 # if this admin is also owner, don't double the record
2259 2259 if usr.user_id == owner_row[0].user_id:
2260 2260 owner_row[0].admin_row = True
2261 2261 else:
2262 2262 usr = AttributeDict(usr.get_dict())
2263 2263 usr.admin_row = True
2264 2264 usr.permission = _admin_perm
2265 2265 usr.permission_id = None
2266 2266 super_admin_rows.append(usr)
2267 2267
2268 2268 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2269 2269 q = q.options(joinedload(UserRepoToPerm.repository),
2270 2270 joinedload(UserRepoToPerm.user),
2271 2271 joinedload(UserRepoToPerm.permission),)
2272 2272
2273 2273 # get owners, admins and their permissions. We re-write the sqlalchemy
2274 2274 # objects into named-tuples because the sqlalchemy session holds a global
2275 2275 # reference, and changing one object would propagate to all others. This
2276 2276 # means that if an admin is also an owner, an admin_row change would
2277 2277 # otherwise propagate to both objects
2278 2278 perm_rows = []
2279 2279 for _usr in q.all():
2280 2280 usr = AttributeDict(_usr.user.get_dict())
2281 2281 # if this user is also owner/admin, mark as duplicate record
2282 2282 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2283 2283 usr.duplicate_perm = True
2284 2284 # also check if this permission is maybe used by branch_permissions
2285 2285 if _usr.branch_perm_entry:
2286 2286 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2287 2287
2288 2288 usr.permission = _usr.permission.permission_name
2289 2289 usr.permission_id = _usr.repo_to_perm_id
2290 2290 perm_rows.append(usr)
2291 2291
2292 2292 # filter the perm rows by 'default' first and then sort them by
2293 2293 # admin,write,read,none permissions sorted again alphabetically in
2294 2294 # each group
2295 2295 perm_rows = sorted(perm_rows, key=display_user_sort)
2296 2296
2297 2297 user_groups_rows = []
2298 2298 if expand_from_user_groups:
2299 2299 for ug in self.permission_user_groups(with_members=True):
2300 2300 for user_data in ug.members:
2301 2301 user_groups_rows.append(user_data)
2302 2302
2303 2303 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2304 2304
2305 2305 def permission_user_groups(self, with_members=True):
2306 2306 q = UserGroupRepoToPerm.query()\
2307 2307 .filter(UserGroupRepoToPerm.repository == self)
2308 2308 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2309 2309 joinedload(UserGroupRepoToPerm.users_group),
2310 2310 joinedload(UserGroupRepoToPerm.permission),)
2311 2311
2312 2312 perm_rows = []
2313 2313 for _user_group in q.all():
2314 2314 entry = AttributeDict(_user_group.users_group.get_dict())
2315 2315 entry.permission = _user_group.permission.permission_name
2316 2316 if with_members:
2317 2317 entry.members = [x.user.get_dict()
2318 2318 for x in _user_group.users_group.members]
2319 2319 perm_rows.append(entry)
2320 2320
2321 2321 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2322 2322 return perm_rows
2323 2323
2324 2324 def get_api_data(self, include_secrets=False):
2325 2325 """
2326 2326 Common function for generating repo api data
2327 2327
2328 2328 :param include_secrets: See :meth:`User.get_api_data`.
2329 2329
2330 2330 """
2331 2331 # TODO: mikhail: there is an anti-pattern here, we probably need to
2332 2332 # move these methods to the model level.
2333 2333 from rhodecode.model.settings import SettingsModel
2334 2334 from rhodecode.model.repo import RepoModel
2335 2335
2336 2336 repo = self
2337 2337 _user_id, _time, _reason = self.locked
2338 2338
2339 2339 data = {
2340 2340 'repo_id': repo.repo_id,
2341 2341 'repo_name': repo.repo_name,
2342 2342 'repo_type': repo.repo_type,
2343 2343 'clone_uri': repo.clone_uri or '',
2344 2344 'push_uri': repo.push_uri or '',
2345 2345 'url': RepoModel().get_url(self),
2346 2346 'private': repo.private,
2347 2347 'created_on': repo.created_on,
2348 2348 'description': repo.description_safe,
2349 2349 'landing_rev': repo.landing_rev,
2350 2350 'owner': repo.user.username,
2351 2351 'fork_of': repo.fork.repo_name if repo.fork else None,
2352 2352 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2353 2353 'enable_statistics': repo.enable_statistics,
2354 2354 'enable_locking': repo.enable_locking,
2355 2355 'enable_downloads': repo.enable_downloads,
2356 2356 'last_changeset': repo.changeset_cache,
2357 2357 'locked_by': User.get(_user_id).get_api_data(
2358 2358 include_secrets=include_secrets) if _user_id else None,
2359 2359 'locked_date': time_to_datetime(_time) if _time else None,
2360 2360 'lock_reason': _reason if _reason else None,
2361 2361 }
2362 2362
2363 2363 # TODO: mikhail: should be per-repo settings here
2364 2364 rc_config = SettingsModel().get_all_settings()
2365 2365 repository_fields = str2bool(
2366 2366 rc_config.get('rhodecode_repository_fields'))
2367 2367 if repository_fields:
2368 2368 for f in self.extra_fields:
2369 2369 data[f.field_key_prefixed] = f.field_value
2370 2370
2371 2371 return data
2372 2372
2373 2373 @classmethod
2374 2374 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2375 2375 if not lock_time:
2376 2376 lock_time = time.time()
2377 2377 if not lock_reason:
2378 2378 lock_reason = cls.LOCK_AUTOMATIC
2379 2379 repo.locked = [user_id, lock_time, lock_reason]
2380 2380 Session().add(repo)
2381 2381 Session().commit()
2382 2382
2383 2383 @classmethod
2384 2384 def unlock(cls, repo):
2385 2385 repo.locked = None
2386 2386 Session().add(repo)
2387 2387 Session().commit()
2388 2388
2389 2389 @classmethod
2390 2390 def getlock(cls, repo):
2391 2391 return repo.locked
2392 2392
2393 2393 def get_locking_state(self, action, user_id, only_when_enabled=True):
2394 2394 """
2395 2395 Checks locking on this repository. If locking is enabled and a lock is
2396 2396 present, returns a tuple of (make_lock, locked, locked_by).
2397 2397 make_lock can have 3 states: None (do nothing), True (make a lock) and
2398 2398 False (release the lock). This value is later propagated to hooks, which
2399 2399 do the actual locking. Think of it as a signal telling the hooks what to do.
2400 2400
2401 2401 """
2402 2402 # TODO: johbo: This is part of the business logic and should be moved
2403 2403 # into the RepositoryModel.
2404 2404
2405 2405 if action not in ('push', 'pull'):
2406 2406 raise ValueError("Invalid action value: %s" % repr(action))
2407 2407
2408 2408 # defines if locked error should be thrown to user
2409 2409 currently_locked = False
2410 2410 # defines if new lock should be made, tri-state
2411 2411 make_lock = None
2412 2412 repo = self
2413 2413 user = User.get(user_id)
2414 2414
2415 2415 lock_info = repo.locked
2416 2416
2417 2417 if repo and (repo.enable_locking or not only_when_enabled):
2418 2418 if action == 'push':
2419 2419 # check if it's already locked; if it is, compare users
2420 2420 locked_by_user_id = lock_info[0]
2421 2421 if user.user_id == locked_by_user_id:
2422 2422 log.debug(
2423 2423 'Got `push` action from user %s, now unlocking', user)
2424 2424 # unlock if we have push from user who locked
2425 2425 make_lock = False
2426 2426 else:
2427 2427 # we're not the same user who locked it, respond with the
2428 2428 # code defined in settings (default is HTTP 423 Locked)
2429 2429 log.debug('Repo %s is currently locked by %s', repo, user)
2430 2430 currently_locked = True
2431 2431 elif action == 'pull':
2432 2432 # [0] user [1] date
2433 2433 if lock_info[0] and lock_info[1]:
2434 2434 log.debug('Repo %s is currently locked by %s', repo, user)
2435 2435 currently_locked = True
2436 2436 else:
2437 2437 log.debug('Setting lock on repo %s by %s', repo, user)
2438 2438 make_lock = True
2439 2439
2440 2440 else:
2441 2441 log.debug('Repository %s does not have locking enabled', repo)
2442 2442
2443 2443 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2444 2444 make_lock, currently_locked, lock_info)
2445 2445
2446 2446 from rhodecode.lib.auth import HasRepoPermissionAny
2447 2447 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2448 2448 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2449 2449 # if we don't have at least write permission we cannot make a lock
2450 2450 log.debug('lock state reset back to FALSE due to lack '
2451 2451 'of at least write permission')
2452 2452 make_lock = False
2453 2453
2454 2454 return make_lock, currently_locked, lock_info
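    # Illustrative interpretation of the return value (example values assumed):
    #
    #   >>> make_lock, locked, lock_info = repo.get_locking_state('push', user_id=2)
    #   >>> make_lock    # None = do nothing, True = set a lock, False = release it
    #   >>> locked       # True when the repo is currently locked by someone else
    #   >>> lock_info    # raw lock data: (user_id, lock_time, lock_reason)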
2455 2455
2456 2456 @property
2457 2457 def last_commit_cache_update_diff(self):
2458 2458 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2459 2459
2460 2460 @classmethod
2461 2461 def _load_commit_change(cls, last_commit_cache):
2462 2462 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2463 2463 empty_date = datetime.datetime.fromtimestamp(0)
2464 2464 date_latest = last_commit_cache.get('date', empty_date)
2465 2465 try:
2466 2466 return parse_datetime(date_latest)
2467 2467 except Exception:
2468 2468 return empty_date
2469 2469
2470 2470 @property
2471 2471 def last_commit_change(self):
2472 2472 return self._load_commit_change(self.changeset_cache)
2473 2473
2474 2474 @property
2475 2475 def last_db_change(self):
2476 2476 return self.updated_on
2477 2477
2478 2478 @property
2479 2479 def clone_uri_hidden(self):
2480 2480 clone_uri = self.clone_uri
2481 2481 if clone_uri:
2482 2482 import urlobject
2483 2483 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2484 2484 if url_obj.password:
2485 2485 clone_uri = url_obj.with_password('*****')
2486 2486 return clone_uri
2487 2487
2488 2488 @property
2489 2489 def push_uri_hidden(self):
2490 2490 push_uri = self.push_uri
2491 2491 if push_uri:
2492 2492 import urlobject
2493 2493 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2494 2494 if url_obj.password:
2495 2495 push_uri = url_obj.with_password('*****')
2496 2496 return push_uri
2497 2497
2498 2498 def clone_url(self, **override):
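# Illustrative usage (editorial addition; the override keys shown are the ones
# handled in this method):
#   repo.clone_url()               # clone URI built from the configured template
#   repo.clone_url(ssh=True)       # uses the SSH clone URI template
#   repo.clone_url(with_id=True)   # uses the repo-id based URI template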
2499 2499 from rhodecode.model.settings import SettingsModel
2500 2500
2501 2501 uri_tmpl = None
2502 2502 if 'with_id' in override:
2503 2503 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2504 2504 del override['with_id']
2505 2505
2506 2506 if 'uri_tmpl' in override:
2507 2507 uri_tmpl = override['uri_tmpl']
2508 2508 del override['uri_tmpl']
2509 2509
2510 2510 ssh = False
2511 2511 if 'ssh' in override:
2512 2512 ssh = True
2513 2513 del override['ssh']
2514 2514
2515 2515 # we didn't override our tmpl from **overrides
2516 2516 request = get_current_request()
2517 2517 if not uri_tmpl:
2518 2518 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2519 2519 rc_config = request.call_context.rc_config
2520 2520 else:
2521 2521 rc_config = SettingsModel().get_all_settings(cache=True)
2522 2522
2523 2523 if ssh:
2524 2524 uri_tmpl = rc_config.get(
2525 2525 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2526 2526
2527 2527 else:
2528 2528 uri_tmpl = rc_config.get(
2529 2529 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2530 2530
2531 2531 return get_clone_url(request=request,
2532 2532 uri_tmpl=uri_tmpl,
2533 2533 repo_name=self.repo_name,
2534 2534 repo_id=self.repo_id,
2535 2535 repo_type=self.repo_type,
2536 2536 **override)
2537 2537
2538 2538 def set_state(self, state):
2539 2539 self.repo_state = state
2540 2540 Session().add(self)
2541 2541 #==========================================================================
2542 2542 # SCM PROPERTIES
2543 2543 #==========================================================================
2544 2544
2545 2545 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2546 2546 return get_commit_safe(
2547 2547 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2548 2548 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2549 2549
2550 2550 def get_changeset(self, rev=None, pre_load=None):
2551 2551 warnings.warn("Use get_commit", DeprecationWarning)
2552 2552 commit_id = None
2553 2553 commit_idx = None
2554 2554 if isinstance(rev, str):
2555 2555 commit_id = rev
2556 2556 else:
2557 2557 commit_idx = rev
2558 2558 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2559 2559 pre_load=pre_load)
2560 2560
2561 2561 def get_landing_commit(self):
2562 2562 """
2563 2563 Returns landing commit, or if that doesn't exist returns the tip
2564 2564 """
2565 2565 _rev_type, _rev = self.landing_rev
2566 2566 commit = self.get_commit(_rev)
2567 2567 if isinstance(commit, EmptyCommit):
2568 2568 return self.get_commit()
2569 2569 return commit
2570 2570
2571 2571 def flush_commit_cache(self):
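# Editorial note: the dummy raw_id written below makes the stored cache look
# outdated, so the second call recomputes it from the actual repository state.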
2572 2572 self.update_commit_cache(cs_cache={'raw_id':'0'})
2573 2573 self.update_commit_cache()
2574 2574
2575 2575 def update_commit_cache(self, cs_cache=None, config=None):
2576 2576 """
2577 2577 Update cache of last commit for repository
2578 2578 cache_keys should be::
2579 2579
2580 2580 source_repo_id
2581 2581 short_id
2582 2582 raw_id
2583 2583 revision
2584 2584 parents
2585 2585 message
2586 2586 date
2587 2587 author
2588 2588 updated_on
2589 2589
2590 2590 """
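# For illustration only (editorial addition), a populated cs_cache looks
# roughly like:
#   {'raw_id': '<full sha>', 'short_id': '<short sha>', 'revision': 42,
#    'parents': [...], 'message': '...', 'author': '...',
#    'date': <datetime>, 'updated_on': <unix timestamp>}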
2591 2591 from rhodecode.lib.vcs.backends.base import BaseCommit
2592 2592 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2593 2593 empty_date = datetime.datetime.fromtimestamp(0)
2594 2594 repo_commit_count = 0
2595 2595
2596 2596 if cs_cache is None:
2597 2597 # use no-cache version here
2598 2598 try:
2599 2599 scm_repo = self.scm_instance(cache=False, config=config)
2600 2600 except VCSError:
2601 2601 scm_repo = None
2602 2602 empty = scm_repo is None or scm_repo.is_empty()
2603 2603
2604 2604 if not empty:
2605 2605 cs_cache = scm_repo.get_commit(
2606 2606 pre_load=["author", "date", "message", "parents", "branch"])
2607 2607 repo_commit_count = scm_repo.count()
2608 2608 else:
2609 2609 cs_cache = EmptyCommit()
2610 2610
2611 2611 if isinstance(cs_cache, BaseCommit):
2612 2612 cs_cache = cs_cache.__json__()
2613 2613
2614 2614 def is_outdated(new_cs_cache):
2615 2615 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2616 2616 new_cs_cache['revision'] != self.changeset_cache['revision']):
2617 2617 return True
2618 2618 return False
2619 2619
2620 2620 # check if we maybe already have the latest cached revision
2621 2621 if is_outdated(cs_cache) or not self.changeset_cache:
2622 2622 _current_datetime = datetime.datetime.utcnow()
2623 2623 last_change = cs_cache.get('date') or _current_datetime
2624 2624 # we check if the new commit date is newer than the current time;
2625 2625 # if it is, we use the current timestamp instead, so a commit carrying
2626 2626 # a bogus future date cannot push the cached date into the future.
2627 2627 last_change_timestamp = datetime_to_time(last_change)
2628 2628 current_timestamp = datetime_to_time(_current_datetime)
2629 2629 if last_change_timestamp > current_timestamp and not empty:
2630 2630 cs_cache['date'] = _current_datetime
2631 2631
2632 2632 # also store size of repo
2633 2633 cs_cache['repo_commit_count'] = repo_commit_count
2634 2634
2635 2635 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2636 2636 cs_cache['updated_on'] = time.time()
2637 2637 self.changeset_cache = cs_cache
2638 2638 self.updated_on = last_change
2639 2639 Session().add(self)
2640 2640 Session().commit()
2641 2641
2642 2642 else:
2643 2643 if empty:
2644 2644 cs_cache = EmptyCommit().__json__()
2645 2645 else:
2646 2646 cs_cache = self.changeset_cache
2647 2647
2648 2648 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2649 2649
2650 2650 cs_cache['updated_on'] = time.time()
2651 2651 self.changeset_cache = cs_cache
2652 2652 self.updated_on = _date_latest
2653 2653 Session().add(self)
2654 2654 Session().commit()
2655 2655
2656 2656 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2657 2657 self.repo_name, cs_cache, _date_latest)
2658 2658
2659 2659 @property
2660 2660 def tip(self):
2661 2661 return self.get_commit('tip')
2662 2662
2663 2663 @property
2664 2664 def author(self):
2665 2665 return self.tip.author
2666 2666
2667 2667 @property
2668 2668 def last_change(self):
2669 2669 return self.scm_instance().last_change
2670 2670
2671 2671 def get_comments(self, revisions=None):
2672 2672 """
2673 2673 Returns comments for this repository grouped by revisions
2674 2674
2675 2675 :param revisions: filter query by revisions only
2676 2676 """
2677 2677 cmts = ChangesetComment.query()\
2678 2678 .filter(ChangesetComment.repo == self)
2679 2679 if revisions:
2680 2680 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2681 2681 grouped = collections.defaultdict(list)
2682 2682 for cmt in cmts.all():
2683 2683 grouped[cmt.revision].append(cmt)
2684 2684 return grouped
2685 2685
2686 2686 def statuses(self, revisions=None):
2687 2687 """
2688 2688 Returns statuses for this repository
2689 2689
2690 2690 :param revisions: list of revisions to get statuses for
2691 2691 """
2692 2692 statuses = ChangesetStatus.query()\
2693 2693 .filter(ChangesetStatus.repo == self)\
2694 2694 .filter(ChangesetStatus.version == 0)
2695 2695
2696 2696 if revisions:
2697 2697 # Try doing the filtering in chunks to avoid hitting limits
2698 2698 size = 500
2699 2699 status_results = []
2700 2700 for chunk in range(0, len(revisions), size):
2701 2701 status_results += statuses.filter(
2702 2702 ChangesetStatus.revision.in_(
2703 2703 revisions[chunk: chunk+size])
2704 2704 ).all()
2705 2705 else:
2706 2706 status_results = statuses.all()
2707 2707
2708 2708 grouped = {}
2709 2709
2710 2710 # maybe we have an open pull request without a status yet?
2711 2711 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2712 2712 status_lbl = ChangesetStatus.get_status_lbl(stat)
2713 2713 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2714 2714 for rev in pr.revisions:
2715 2715 pr_id = pr.pull_request_id
2716 2716 pr_repo = pr.target_repo.repo_name
2717 2717 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2718 2718
2719 2719 for stat in status_results:
2720 2720 pr_id = pr_repo = None
2721 2721 if stat.pull_request:
2722 2722 pr_id = stat.pull_request.pull_request_id
2723 2723 pr_repo = stat.pull_request.target_repo.repo_name
2724 2724 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2725 2725 pr_id, pr_repo]
2726 2726 return grouped
2727 2727
2728 2728 # ==========================================================================
2729 2729 # SCM CACHE INSTANCE
2730 2730 # ==========================================================================
2731 2731
2732 2732 def scm_instance(self, **kwargs):
2733 2733 import rhodecode
2734 2734
2735 2735 # Passing a config will bypass the cache; currently this is only
2736 2736 # used by repo2dbmapper
2737 2737 config = kwargs.pop('config', None)
2738 2738 cache = kwargs.pop('cache', None)
2739 2739 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2740 2740 if vcs_full_cache is not None:
2741 2741 # allows overriding the global config
2742 2742 full_cache = vcs_full_cache
2743 2743 else:
2744 2744 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2745 2745 # if cache is NOT defined, use the global default; otherwise we have
2746 2746 # full control over the cache behaviour
2747 2747 if cache is None and full_cache and not config:
2748 2748 log.debug('Initializing pure cached instance for %s', self.repo_path)
2749 2749 return self._get_instance_cached()
2750 2750
2751 2751 # cache here is sent to the "vcs server"
2752 2752 return self._get_instance(cache=bool(cache), config=config)
2753 2753
2754 2754 def _get_instance_cached(self):
2755 2755 from rhodecode.lib import rc_cache
2756 2756
2757 2757 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2758 2758 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2759 2759
2760 2760 # we must use a thread-scoped cache here, because each gevent thread
2761 2761 # needs its own, non-shared connection and cache.
2762 2762 # we also alter `args` so the cache key is individual for every green thread.
2763 2763 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2764 2764 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2765 2765
2766 2766 # our wrapped caching function; it takes state_uid so the previous state is saved alongside the cached value
2767 2767 def cache_generator(_state_uid):
2768 2768
2769 2769 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2770 2770 def get_instance_cached(_repo_id, _process_context_id):
2771 2771 # we store the generation state inside the cached value so we can detect a change and invalidate caches
2772 2772 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2773 2773
2774 2774 return get_instance_cached
2775 2775
2776 2776 with inv_context_manager as invalidation_context:
2777 2777 cache_state_uid = invalidation_context.state_uid
2778 2778 cache_func = cache_generator(cache_state_uid)
2779 2779
2780 2780 args = self.repo_id, inv_context_manager.proc_key
2781 2781
2782 2782 previous_state_uid, instance = cache_func(*args)
2783 2783
2784 2784 # now compare keys, the "cache" state vs expected state.
2785 2785 if previous_state_uid != cache_state_uid:
2786 2786 log.warning('Cached state uid %s is different than current state uid %s',
2787 2787 previous_state_uid, cache_state_uid)
2788 2788 _, instance = cache_func.refresh(*args)
2789 2789
2790 2790 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2791 2791 return instance
2792 2792
2793 2793 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2794 2794 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2795 2795 self.repo_type, self.repo_path, cache)
2796 2796 config = config or self._config
2797 2797 custom_wire = {
2798 2798 'cache': cache, # controls the vcs.remote cache
2799 2799 'repo_state_uid': repo_state_uid
2800 2800 }
2801 2801
2802 2802 repo = get_vcs_instance(
2803 2803 repo_path=safe_str(self.repo_full_path),
2804 2804 config=config,
2805 2805 with_wire=custom_wire,
2806 2806 create=False,
2807 2807 _vcs_alias=self.repo_type)
2808 2808 if repo is not None:
2809 2809 repo.count() # cache rebuild
2810 2810
2811 2811 return repo
2812 2812
2813 2813 def get_shadow_repository_path(self, workspace_id):
2814 2814 from rhodecode.lib.vcs.backends.base import BaseRepository
2815 2815 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2816 2816 self.repo_full_path, self.repo_id, workspace_id)
2817 2817 return shadow_repo_path
2818 2818
2819 2819 def __json__(self):
2820 2820 return {'landing_rev': self.landing_rev}
2821 2821
2822 2822 def get_dict(self):
2823 2823
2824 2824 # Since we transformed `repo_name` to a hybrid property, we need to
2825 2825 # keep compatibility with the code which uses `repo_name` field.
2826 2826
2827 2827 result = super(Repository, self).get_dict()
2828 2828 result['repo_name'] = result.pop('_repo_name', None)
2829 2829 result.pop('_changeset_cache', '')
2830 2830 return result
2831 2831
2832 2832
2833 2833 class RepoGroup(Base, BaseModel):
2834 2834 __tablename__ = 'groups'
2835 2835 __table_args__ = (
2836 2836 UniqueConstraint('group_name', 'group_parent_id'),
2837 2837 base_table_args,
2838 2838 )
2839 2839
2840 2840 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2841 2841
2842 2842 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2843 2843 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2844 2844 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2845 2845 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2846 2846 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2847 2847 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2848 2848 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2849 2849 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2850 2850 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2851 2851 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2852 2852 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2853 2853
2854 2854 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
2855 2855 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
2856 2856 parent_group = relationship('RepoGroup', remote_side=group_id)
2857 2857 user = relationship('User', back_populates='repository_groups')
2858 2858 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')
2859 2859
2860 2860 # no cascade, set NULL
2861 2861 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2862 2862
2863 2863 def __init__(self, group_name='', parent_group=None):
2864 2864 self.group_name = group_name
2865 2865 self.parent_group = parent_group
2866 2866
2867 2867 def __repr__(self):
2868 2868 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2869 2869
2870 2870 @hybrid_property
2871 2871 def group_name(self):
2872 2872 return self._group_name
2873 2873
2874 2874 @group_name.setter
2875 2875 def group_name(self, value):
2876 2876 self._group_name = value
2877 2877 self.group_name_hash = self.hash_repo_group_name(value)
2878 2878
2879 2879 @classmethod
2880 2880 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2881 2881 from rhodecode.lib.vcs.backends.base import EmptyCommit
2882 2882 dummy = EmptyCommit().__json__()
2883 2883 if not changeset_cache_raw:
2884 2884 dummy['source_repo_id'] = repo_id
2885 2885 return json.loads(json.dumps(dummy))
2886 2886
2887 2887 try:
2888 2888 return json.loads(changeset_cache_raw)
2889 2889 except TypeError:
2890 2890 return dummy
2891 2891 except Exception:
2892 2892 log.error(traceback.format_exc())
2893 2893 return dummy
2894 2894
2895 2895 @hybrid_property
2896 2896 def changeset_cache(self):
2897 2897 return self._load_changeset_cache('', self._changeset_cache)
2898 2898
2899 2899 @changeset_cache.setter
2900 2900 def changeset_cache(self, val):
2901 2901 try:
2902 2902 self._changeset_cache = json.dumps(val)
2903 2903 except Exception:
2904 2904 log.error(traceback.format_exc())
2905 2905
2906 2906 @validates('group_parent_id')
2907 2907 def validate_group_parent_id(self, key, val):
2908 2908 """
2909 2909 Check that a group is not set as its own parent (cycle reference)
2910 2910 """
2911 2911 if self.group_id and val:
2912 2912 assert val != self.group_id
2913 2913
2914 2914 return val
2915 2915
2916 2916 @hybrid_property
2917 2917 def description_safe(self):
2918 2918 from rhodecode.lib import helpers as h
2919 2919 return h.escape(self.group_description)
2920 2920
2921 2921 @classmethod
2922 2922 def hash_repo_group_name(cls, repo_group_name):
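# Rough sketch of the transformation (editorial note): the lowercased name
# keeps ASCII letters and replaces every other character with its ordinal,
# e.g. 'My-Group' -> 'my45group' (ord('-') == 45).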
2923 2923 val = remove_formatting(repo_group_name)
2924 2924 val = safe_str(val).lower()
2925 2925 chars = []
2926 2926 for c in val:
2927 2927 if c not in string.ascii_letters:
2928 2928 c = str(ord(c))
2929 2929 chars.append(c)
2930 2930
2931 2931 return ''.join(chars)
2932 2932
2933 2933 @classmethod
2934 2934 def _generate_choice(cls, repo_group):
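# Illustrative (editorial note): a group with full path 'parent/child' yields
# the choice (group_id, literal('parent/child')); groups_choices() below sorts
# these and can prepend the '-- No parent --' entry.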
2935 2935 from webhelpers2.html import literal as _literal
2936 2936
2937 2937 def _name(k):
2938 2938 return _literal(cls.CHOICES_SEPARATOR.join(k))
2939 2939
2940 2940 return repo_group.group_id, _name(repo_group.full_path_splitted)
2941 2941
2942 2942 @classmethod
2943 2943 def groups_choices(cls, groups=None, show_empty_group=True):
2944 2944 if not groups:
2945 2945 groups = cls.query().all()
2946 2946
2947 2947 repo_groups = []
2948 2948 if show_empty_group:
2949 2949 repo_groups = [(-1, '-- %s --' % _('No parent'))]
2950 2950
2951 2951 repo_groups.extend([cls._generate_choice(x) for x in groups])
2952 2952
2953 2953 repo_groups = sorted(
2954 2954 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2955 2955 return repo_groups
2956 2956
2957 2957 @classmethod
2958 2958 def url_sep(cls):
2959 2959 return URL_SEP
2960 2960
2961 2961 @classmethod
2962 2962 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2963 2963 if case_insensitive:
2964 2964 gr = cls.query().filter(func.lower(cls.group_name)
2965 2965 == func.lower(group_name))
2966 2966 else:
2967 2967 gr = cls.query().filter(cls.group_name == group_name)
2968 2968 if cache:
2969 2969 name_key = _hash_key(group_name)
2970 2970 gr = gr.options(
2971 2971 FromCache("sql_cache_short", f"get_group_{name_key}"))
2972 2972 return gr.scalar()
2973 2973
2974 2974 @classmethod
2975 2975 def get_user_personal_repo_group(cls, user_id):
2976 2976 user = User.get(user_id)
2977 2977 if user.username == User.DEFAULT_USER:
2978 2978 return None
2979 2979
2980 2980 return cls.query()\
2981 2981 .filter(cls.personal == true()) \
2982 2982 .filter(cls.user == user) \
2983 2983 .order_by(cls.group_id.asc()) \
2984 2984 .first()
2985 2985
2986 2986 @classmethod
2987 2987 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2988 2988 case_insensitive=True):
2989 2989 q = RepoGroup.query()
2990 2990
2991 2991 if not isinstance(user_id, Optional):
2992 2992 q = q.filter(RepoGroup.user_id == user_id)
2993 2993
2994 2994 if not isinstance(group_id, Optional):
2995 2995 q = q.filter(RepoGroup.group_parent_id == group_id)
2996 2996
2997 2997 if case_insensitive:
2998 2998 q = q.order_by(func.lower(RepoGroup.group_name))
2999 2999 else:
3000 3000 q = q.order_by(RepoGroup.group_name)
3001 3001 return q.all()
3002 3002
3003 3003 @property
3004 3004 def parents(self, parents_recursion_limit=10):
3005 3005 groups = []
3006 3006 if self.parent_group is None:
3007 3007 return groups
3008 3008 cur_gr = self.parent_group
3009 3009 groups.insert(0, cur_gr)
3010 3010 cnt = 0
3011 3011 while 1:
3012 3012 cnt += 1
3013 3013 gr = getattr(cur_gr, 'parent_group', None)
3014 3014 cur_gr = cur_gr.parent_group
3015 3015 if gr is None:
3016 3016 break
3017 3017 if cnt == parents_recursion_limit:
3018 3018 # this will prevent accidental infinite loops
3019 3019 log.error('more than %s parents found for group %s, stopping '
3020 3020 'recursive parent fetching', parents_recursion_limit, self)
3021 3021 break
3022 3022
3023 3023 groups.insert(0, gr)
3024 3024 return groups
3025 3025
3026 3026 @property
3027 3027 def last_commit_cache_update_diff(self):
3028 3028 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
3029 3029
3030 3030 @classmethod
3031 3031 def _load_commit_change(cls, last_commit_cache):
3032 3032 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3033 3033 empty_date = datetime.datetime.fromtimestamp(0)
3034 3034 date_latest = last_commit_cache.get('date', empty_date)
3035 3035 try:
3036 3036 return parse_datetime(date_latest)
3037 3037 except Exception:
3038 3038 return empty_date
3039 3039
3040 3040 @property
3041 3041 def last_commit_change(self):
3042 3042 return self._load_commit_change(self.changeset_cache)
3043 3043
3044 3044 @property
3045 3045 def last_db_change(self):
3046 3046 return self.updated_on
3047 3047
3048 3048 @property
3049 3049 def children(self):
3050 3050 return RepoGroup.query().filter(RepoGroup.parent_group == self)
3051 3051
3052 3052 @property
3053 3053 def name(self):
3054 3054 return self.group_name.split(RepoGroup.url_sep())[-1]
3055 3055
3056 3056 @property
3057 3057 def full_path(self):
3058 3058 return self.group_name
3059 3059
3060 3060 @property
3061 3061 def full_path_splitted(self):
3062 3062 return self.group_name.split(RepoGroup.url_sep())
3063 3063
3064 3064 @property
3065 3065 def repositories(self):
3066 3066 return Repository.query()\
3067 3067 .filter(Repository.group == self)\
3068 3068 .order_by(Repository.repo_name)
3069 3069
3070 3070 @property
3071 3071 def repositories_recursive_count(self):
3072 3072 cnt = self.repositories.count()
3073 3073
3074 3074 def children_count(group):
3075 3075 cnt = 0
3076 3076 for child in group.children:
3077 3077 cnt += child.repositories.count()
3078 3078 cnt += children_count(child)
3079 3079 return cnt
3080 3080
3081 3081 return cnt + children_count(self)
3082 3082
3083 3083 def _recursive_objects(self, include_repos=True, include_groups=True):
3084 3084 all_ = []
3085 3085
3086 3086 def _get_members(root_gr):
3087 3087 if include_repos:
3088 3088 for r in root_gr.repositories:
3089 3089 all_.append(r)
3090 3090 childs = root_gr.children.all()
3091 3091 if childs:
3092 3092 for gr in childs:
3093 3093 if include_groups:
3094 3094 all_.append(gr)
3095 3095 _get_members(gr)
3096 3096
3097 3097 root_group = []
3098 3098 if include_groups:
3099 3099 root_group = [self]
3100 3100
3101 3101 _get_members(self)
3102 3102 return root_group + all_
3103 3103
3104 3104 def recursive_groups_and_repos(self):
3105 3105 """
3106 3106 Recursively return all groups, together with the repositories in those groups
3107 3107 """
3108 3108 return self._recursive_objects()
3109 3109
3110 3110 def recursive_groups(self):
3111 3111 """
3112 3112 Returns all child groups for this group, including children of children
3113 3113 """
3114 3114 return self._recursive_objects(include_repos=False)
3115 3115
3116 3116 def recursive_repos(self):
3117 3117 """
3118 3118 Returns all child repositories for this group
3119 3119 """
3120 3120 return self._recursive_objects(include_groups=False)
3121 3121
3122 3122 def get_new_name(self, group_name):
3123 3123 """
3124 3124 Returns the new full group name based on the parent group and the new name
3125 3125
3126 3126 :param group_name:
3127 3127 """
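# Example (editorial, assumed values): for a group whose parent is 'parent',
# get_new_name('renamed') returns 'parent/renamed'.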
3128 3128 path_prefix = (self.parent_group.full_path_splitted if
3129 3129 self.parent_group else [])
3130 3130 return RepoGroup.url_sep().join(path_prefix + [group_name])
3131 3131
3132 3132 def update_commit_cache(self, config=None):
3133 3133 """
3134 3134 Update cache of last commit for newest repository inside this repository group.
3135 3135 cache_keys should be::
3136 3136
3137 3137 source_repo_id
3138 3138 short_id
3139 3139 raw_id
3140 3140 revision
3141 3141 parents
3142 3142 message
3143 3143 date
3144 3144 author
3145 3145
3146 3146 """
3147 3147 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3148 3148 empty_date = datetime.datetime.fromtimestamp(0)
3149 3149
3150 3150 def repo_groups_and_repos(root_gr):
3151 3151 for _repo in root_gr.repositories:
3152 3152 yield _repo
3153 3153 for child_group in root_gr.children.all():
3154 3154 yield child_group
3155 3155
3156 3156 latest_repo_cs_cache = {}
3157 3157 for obj in repo_groups_and_repos(self):
3158 3158 repo_cs_cache = obj.changeset_cache
3159 3159 date_latest = latest_repo_cs_cache.get('date', empty_date)
3160 3160 date_current = repo_cs_cache.get('date', empty_date)
3161 3161 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3162 3162 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3163 3163 latest_repo_cs_cache = repo_cs_cache
3164 3164 if hasattr(obj, 'repo_id'):
3165 3165 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3166 3166 else:
3167 3167 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3168 3168
3169 3169 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3170 3170
3171 3171 latest_repo_cs_cache['updated_on'] = time.time()
3172 3172 self.changeset_cache = latest_repo_cs_cache
3173 3173 self.updated_on = _date_latest
3174 3174 Session().add(self)
3175 3175 Session().commit()
3176 3176
3177 3177 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3178 3178 self.group_name, latest_repo_cs_cache, _date_latest)
3179 3179
3180 3180 def permissions(self, with_admins=True, with_owner=True,
3181 3181 expand_from_user_groups=False):
3182 3182 """
3183 3183 Permissions for repository groups
3184 3184 """
3185 3185 _admin_perm = 'group.admin'
3186 3186
3187 3187 owner_row = []
3188 3188 if with_owner:
3189 3189 usr = AttributeDict(self.user.get_dict())
3190 3190 usr.owner_row = True
3191 3191 usr.permission = _admin_perm
3192 3192 owner_row.append(usr)
3193 3193
3194 3194 super_admin_ids = []
3195 3195 super_admin_rows = []
3196 3196 if with_admins:
3197 3197 for usr in User.get_all_super_admins():
3198 3198 super_admin_ids.append(usr.user_id)
3199 3199 # if this admin is also owner, don't double the record
3200 3200 if usr.user_id == owner_row[0].user_id:
3201 3201 owner_row[0].admin_row = True
3202 3202 else:
3203 3203 usr = AttributeDict(usr.get_dict())
3204 3204 usr.admin_row = True
3205 3205 usr.permission = _admin_perm
3206 3206 super_admin_rows.append(usr)
3207 3207
3208 3208 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3209 3209 q = q.options(joinedload(UserRepoGroupToPerm.group),
3210 3210 joinedload(UserRepoGroupToPerm.user),
3211 3211 joinedload(UserRepoGroupToPerm.permission),)
3212 3212
3213 3213 # get owners, admins and their permissions. We re-write the sqlalchemy
3214 3214 # objects into plain AttributeDicts because the sqlalchemy session keeps
3215 3215 # a global reference, and changing one object would propagate to all
3216 3216 # others. Without this, if an admin is also the owner, setting admin_row
3217 3217 # would change both records.
3218 3218 perm_rows = []
3219 3219 for _usr in q.all():
3220 3220 usr = AttributeDict(_usr.user.get_dict())
3221 3221 # if this user is also owner/admin, mark as duplicate record
3222 3222 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3223 3223 usr.duplicate_perm = True
3224 3224 usr.permission = _usr.permission.permission_name
3225 3225 perm_rows.append(usr)
3226 3226
3227 3227 # filter the perm rows by 'default' first and then sort them by
3228 3228 # admin,write,read,none permissions sorted again alphabetically in
3229 3229 # each group
3230 3230 perm_rows = sorted(perm_rows, key=display_user_sort)
3231 3231
3232 3232 user_groups_rows = []
3233 3233 if expand_from_user_groups:
3234 3234 for ug in self.permission_user_groups(with_members=True):
3235 3235 for user_data in ug.members:
3236 3236 user_groups_rows.append(user_data)
3237 3237
3238 3238 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3239 3239
3240 3240 def permission_user_groups(self, with_members=False):
3241 3241 q = UserGroupRepoGroupToPerm.query()\
3242 3242 .filter(UserGroupRepoGroupToPerm.group == self)
3243 3243 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3244 3244 joinedload(UserGroupRepoGroupToPerm.users_group),
3245 3245 joinedload(UserGroupRepoGroupToPerm.permission),)
3246 3246
3247 3247 perm_rows = []
3248 3248 for _user_group in q.all():
3249 3249 entry = AttributeDict(_user_group.users_group.get_dict())
3250 3250 entry.permission = _user_group.permission.permission_name
3251 3251 if with_members:
3252 3252 entry.members = [x.user.get_dict()
3253 3253 for x in _user_group.users_group.members]
3254 3254 perm_rows.append(entry)
3255 3255
3256 3256 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3257 3257 return perm_rows
3258 3258
3259 3259 def get_api_data(self):
3260 3260 """
3261 3261 Common function for generating api data
3262 3262
3263 3263 """
3264 3264 group = self
3265 3265 data = {
3266 3266 'group_id': group.group_id,
3267 3267 'group_name': group.group_name,
3268 3268 'group_description': group.description_safe,
3269 3269 'parent_group': group.parent_group.group_name if group.parent_group else None,
3270 3270 'repositories': [x.repo_name for x in group.repositories],
3271 3271 'owner': group.user.username,
3272 3272 }
3273 3273 return data
3274 3274
3275 3275 def get_dict(self):
3276 3276 # Since we transformed `group_name` to a hybrid property, we need to
3277 3277 # keep compatibility with the code which uses `group_name` field.
3278 3278 result = super(RepoGroup, self).get_dict()
3279 3279 result['group_name'] = result.pop('_group_name', None)
3280 3280 result.pop('_changeset_cache', '')
3281 3281 return result
3282 3282
3283 3283
3284 3284 class Permission(Base, BaseModel):
3285 3285 __tablename__ = 'permissions'
3286 3286 __table_args__ = (
3287 3287 Index('p_perm_name_idx', 'permission_name'),
3288 3288 base_table_args,
3289 3289 )
3290 3290
3291 3291 PERMS = [
3292 3292 ('hg.admin', _('RhodeCode Super Administrator')),
3293 3293
3294 3294 ('repository.none', _('Repository no access')),
3295 3295 ('repository.read', _('Repository read access')),
3296 3296 ('repository.write', _('Repository write access')),
3297 3297 ('repository.admin', _('Repository admin access')),
3298 3298
3299 3299 ('group.none', _('Repository group no access')),
3300 3300 ('group.read', _('Repository group read access')),
3301 3301 ('group.write', _('Repository group write access')),
3302 3302 ('group.admin', _('Repository group admin access')),
3303 3303
3304 3304 ('usergroup.none', _('User group no access')),
3305 3305 ('usergroup.read', _('User group read access')),
3306 3306 ('usergroup.write', _('User group write access')),
3307 3307 ('usergroup.admin', _('User group admin access')),
3308 3308
3309 3309 ('branch.none', _('Branch no permissions')),
3310 3310 ('branch.merge', _('Branch access by web merge')),
3311 3311 ('branch.push', _('Branch access by push')),
3312 3312 ('branch.push_force', _('Branch access by push with force')),
3313 3313
3314 3314 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3315 3315 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3316 3316
3317 3317 ('hg.usergroup.create.false', _('User Group creation disabled')),
3318 3318 ('hg.usergroup.create.true', _('User Group creation enabled')),
3319 3319
3320 3320 ('hg.create.none', _('Repository creation disabled')),
3321 3321 ('hg.create.repository', _('Repository creation enabled')),
3322 3322 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3323 3323 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3324 3324
3325 3325 ('hg.fork.none', _('Repository forking disabled')),
3326 3326 ('hg.fork.repository', _('Repository forking enabled')),
3327 3327
3328 3328 ('hg.register.none', _('Registration disabled')),
3329 3329 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3330 3330 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3331 3331
3332 3332 ('hg.password_reset.enabled', _('Password reset enabled')),
3333 3333 ('hg.password_reset.hidden', _('Password reset hidden')),
3334 3334 ('hg.password_reset.disabled', _('Password reset disabled')),
3335 3335
3336 3336 ('hg.extern_activate.manual', _('Manual activation of external account')),
3337 3337 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3338 3338
3339 3339 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3340 3340 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3341 3341 ]
3342 3342
3343 3343 # definition of system default permissions for DEFAULT user, created on
3344 3344 # system setup
3345 3345 DEFAULT_USER_PERMISSIONS = [
3346 3346 # object perms
3347 3347 'repository.read',
3348 3348 'group.read',
3349 3349 'usergroup.read',
3350 3350 # branch; for backward compatibility we need the same value as before, i.e. force push
3351 3351 'branch.push_force',
3352 3352 # global
3353 3353 'hg.create.repository',
3354 3354 'hg.repogroup.create.false',
3355 3355 'hg.usergroup.create.false',
3356 3356 'hg.create.write_on_repogroup.true',
3357 3357 'hg.fork.repository',
3358 3358 'hg.register.manual_activate',
3359 3359 'hg.password_reset.enabled',
3360 3360 'hg.extern_activate.auto',
3361 3361 'hg.inherit_default_perms.true',
3362 3362 ]
3363 3363
3364 3364 # Weight defines which permissions are more important:
3365 3365 # the higher the number, the more important the permission.
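# Illustrative example (editorial note): these weights let the permission
# calculation compare entries, e.g. 'repository.write' (3) outweighs
# 'repository.read' (1) when the resolution algorithm picks the stronger
# permission.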
3367 3367 PERM_WEIGHTS = {
3368 3368 'repository.none': 0,
3369 3369 'repository.read': 1,
3370 3370 'repository.write': 3,
3371 3371 'repository.admin': 4,
3372 3372
3373 3373 'group.none': 0,
3374 3374 'group.read': 1,
3375 3375 'group.write': 3,
3376 3376 'group.admin': 4,
3377 3377
3378 3378 'usergroup.none': 0,
3379 3379 'usergroup.read': 1,
3380 3380 'usergroup.write': 3,
3381 3381 'usergroup.admin': 4,
3382 3382
3383 3383 'branch.none': 0,
3384 3384 'branch.merge': 1,
3385 3385 'branch.push': 3,
3386 3386 'branch.push_force': 4,
3387 3387
3388 3388 'hg.repogroup.create.false': 0,
3389 3389 'hg.repogroup.create.true': 1,
3390 3390
3391 3391 'hg.usergroup.create.false': 0,
3392 3392 'hg.usergroup.create.true': 1,
3393 3393
3394 3394 'hg.fork.none': 0,
3395 3395 'hg.fork.repository': 1,
3396 3396 'hg.create.none': 0,
3397 3397 'hg.create.repository': 1
3398 3398 }
3399 3399
3400 3400 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3401 3401 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3402 3402 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3403 3403
3404 3404 def __repr__(self):
3405 3405 return "<%s('%s:%s')>" % (
3406 3406 self.cls_name, self.permission_id, self.permission_name
3407 3407 )
3408 3408
3409 3409 @classmethod
3410 3410 def get_by_key(cls, key):
3411 3411 return cls.query().filter(cls.permission_name == key).scalar()
3412 3412
3413 3413 @classmethod
3414 3414 def get_default_repo_perms(cls, user_id, repo_id=None):
3415 3415 q = Session().query(UserRepoToPerm, Repository, Permission)\
3416 3416 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3417 3417 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3418 3418 .filter(UserRepoToPerm.user_id == user_id)
3419 3419 if repo_id:
3420 3420 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3421 3421 return q.all()
3422 3422
3423 3423 @classmethod
3424 3424 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3425 3425 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3426 3426 .join(
3427 3427 Permission,
3428 3428 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3429 3429 .join(
3430 3430 UserRepoToPerm,
3431 3431 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3432 3432 .filter(UserRepoToPerm.user_id == user_id)
3433 3433
3434 3434 if repo_id:
3435 3435 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3436 3436 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3437 3437
3438 3438 @classmethod
3439 3439 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3440 3440 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3441 3441 .join(
3442 3442 Permission,
3443 3443 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3444 3444 .join(
3445 3445 Repository,
3446 3446 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3447 3447 .join(
3448 3448 UserGroup,
3449 3449 UserGroupRepoToPerm.users_group_id ==
3450 3450 UserGroup.users_group_id)\
3451 3451 .join(
3452 3452 UserGroupMember,
3453 3453 UserGroupRepoToPerm.users_group_id ==
3454 3454 UserGroupMember.users_group_id)\
3455 3455 .filter(
3456 3456 UserGroupMember.user_id == user_id,
3457 3457 UserGroup.users_group_active == true())
3458 3458 if repo_id:
3459 3459 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3460 3460 return q.all()
3461 3461
3462 3462 @classmethod
3463 3463 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3464 3464 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3465 3465 .join(
3466 3466 Permission,
3467 3467 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3468 3468 .join(
3469 3469 UserGroupRepoToPerm,
3470 3470 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3471 3471 .join(
3472 3472 UserGroup,
3473 3473 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3474 3474 .join(
3475 3475 UserGroupMember,
3476 3476 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3477 3477 .filter(
3478 3478 UserGroupMember.user_id == user_id,
3479 3479 UserGroup.users_group_active == true())
3480 3480
3481 3481 if repo_id:
3482 3482 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3483 3483 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3484 3484
3485 3485 @classmethod
3486 3486 def get_default_group_perms(cls, user_id, repo_group_id=None):
3487 3487 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3488 3488 .join(
3489 3489 Permission,
3490 3490 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3491 3491 .join(
3492 3492 RepoGroup,
3493 3493 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3494 3494 .filter(UserRepoGroupToPerm.user_id == user_id)
3495 3495 if repo_group_id:
3496 3496 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3497 3497 return q.all()
3498 3498
3499 3499 @classmethod
3500 3500 def get_default_group_perms_from_user_group(
3501 3501 cls, user_id, repo_group_id=None):
3502 3502 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3503 3503 .join(
3504 3504 Permission,
3505 3505 UserGroupRepoGroupToPerm.permission_id ==
3506 3506 Permission.permission_id)\
3507 3507 .join(
3508 3508 RepoGroup,
3509 3509 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3510 3510 .join(
3511 3511 UserGroup,
3512 3512 UserGroupRepoGroupToPerm.users_group_id ==
3513 3513 UserGroup.users_group_id)\
3514 3514 .join(
3515 3515 UserGroupMember,
3516 3516 UserGroupRepoGroupToPerm.users_group_id ==
3517 3517 UserGroupMember.users_group_id)\
3518 3518 .filter(
3519 3519 UserGroupMember.user_id == user_id,
3520 3520 UserGroup.users_group_active == true())
3521 3521 if repo_group_id:
3522 3522 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3523 3523 return q.all()
3524 3524
3525 3525 @classmethod
3526 3526 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3527 3527 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3528 3528 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3529 3529 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3530 3530 .filter(UserUserGroupToPerm.user_id == user_id)
3531 3531 if user_group_id:
3532 3532 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3533 3533 return q.all()
3534 3534
3535 3535 @classmethod
3536 3536 def get_default_user_group_perms_from_user_group(
3537 3537 cls, user_id, user_group_id=None):
3538 3538 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3539 3539 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3540 3540 .join(
3541 3541 Permission,
3542 3542 UserGroupUserGroupToPerm.permission_id ==
3543 3543 Permission.permission_id)\
3544 3544 .join(
3545 3545 TargetUserGroup,
3546 3546 UserGroupUserGroupToPerm.target_user_group_id ==
3547 3547 TargetUserGroup.users_group_id)\
3548 3548 .join(
3549 3549 UserGroup,
3550 3550 UserGroupUserGroupToPerm.user_group_id ==
3551 3551 UserGroup.users_group_id)\
3552 3552 .join(
3553 3553 UserGroupMember,
3554 3554 UserGroupUserGroupToPerm.user_group_id ==
3555 3555 UserGroupMember.users_group_id)\
3556 3556 .filter(
3557 3557 UserGroupMember.user_id == user_id,
3558 3558 UserGroup.users_group_active == true())
3559 3559 if user_group_id:
3560 3560 q = q.filter(
3561 3561 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3562 3562
3563 3563 return q.all()
3564 3564
3565 3565
3566 3566 class UserRepoToPerm(Base, BaseModel):
3567 3567 __tablename__ = 'repo_to_perm'
3568 3568 __table_args__ = (
3569 3569 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3570 3570 base_table_args
3571 3571 )
3572 3572
3573 3573 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3574 3574 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3575 3575 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3576 3576 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3577 3577
3578 3578 user = relationship('User', back_populates="repo_to_perm")
3579 3579 repository = relationship('Repository', back_populates="repo_to_perm")
3580 3580 permission = relationship('Permission')
3581 3581
3582 3582 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')
3583 3583
3584 3584 @classmethod
3585 3585 def create(cls, user, repository, permission):
3586 3586 n = cls()
3587 3587 n.user = user
3588 3588 n.repository = repository
3589 3589 n.permission = permission
3590 3590 Session().add(n)
3591 3591 return n
3592 3592
3593 3593 def __repr__(self):
3594 3594 return f'<{self.user} => {self.repository} >'
3595 3595
3596 3596
3597 3597 class UserUserGroupToPerm(Base, BaseModel):
3598 3598 __tablename__ = 'user_user_group_to_perm'
3599 3599 __table_args__ = (
3600 3600 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3601 3601 base_table_args
3602 3602 )
3603 3603
3604 3604 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3605 3605 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3606 3606 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3607 3607 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3608 3608
3609 3609 user = relationship('User', back_populates='user_group_to_perm')
3610 3610 user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
3611 3611 permission = relationship('Permission')
3612 3612
3613 3613 @classmethod
3614 3614 def create(cls, user, user_group, permission):
3615 3615 n = cls()
3616 3616 n.user = user
3617 3617 n.user_group = user_group
3618 3618 n.permission = permission
3619 3619 Session().add(n)
3620 3620 return n
3621 3621
3622 3622 def __repr__(self):
3623 3623 return f'<{self.user} => {self.user_group} >'
3624 3624
3625 3625
3626 3626 class UserToPerm(Base, BaseModel):
3627 3627 __tablename__ = 'user_to_perm'
3628 3628 __table_args__ = (
3629 3629 UniqueConstraint('user_id', 'permission_id'),
3630 3630 base_table_args
3631 3631 )
3632 3632
3633 3633 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3634 3634 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3635 3635 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3636 3636
3637 3637 user = relationship('User', back_populates='user_perms')
3638 3638 permission = relationship('Permission', lazy='joined')
3639 3639
3640 3640 def __repr__(self):
3641 3641 return f'<{self.user} => {self.permission} >'
3642 3642
3643 3643
3644 3644 class UserGroupRepoToPerm(Base, BaseModel):
3645 3645 __tablename__ = 'users_group_repo_to_perm'
3646 3646 __table_args__ = (
3647 3647 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3648 3648 base_table_args
3649 3649 )
3650 3650
3651 3651 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3652 3652 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3653 3653 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3654 3654 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3655 3655
3656 3656 users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
3657 3657 permission = relationship('Permission')
3658 3658 repository = relationship('Repository', back_populates='users_group_to_perm')
3659 3659 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')
3660 3660
3661 3661 @classmethod
3662 3662 def create(cls, users_group, repository, permission):
3663 3663 n = cls()
3664 3664 n.users_group = users_group
3665 3665 n.repository = repository
3666 3666 n.permission = permission
3667 3667 Session().add(n)
3668 3668 return n
3669 3669
3670 3670 def __repr__(self):
3671 3671 return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3672 3672
3673 3673
3674 3674 class UserGroupUserGroupToPerm(Base, BaseModel):
3675 3675 __tablename__ = 'user_group_user_group_to_perm'
3676 3676 __table_args__ = (
3677 3677 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3678 3678 CheckConstraint('target_user_group_id != user_group_id'),
3679 3679 base_table_args
3680 3680 )
3681 3681
3682 3682 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3683 3683 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3684 3684 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3685 3685 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3686 3686
3687 3687 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
3688 3688 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3689 3689 permission = relationship('Permission')
3690 3690
3691 3691 @classmethod
3692 3692 def create(cls, target_user_group, user_group, permission):
3693 3693 n = cls()
3694 3694 n.target_user_group = target_user_group
3695 3695 n.user_group = user_group
3696 3696 n.permission = permission
3697 3697 Session().add(n)
3698 3698 return n
3699 3699
3700 3700 def __repr__(self):
3701 3701 return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3702 3702
3703 3703
3704 3704 class UserGroupToPerm(Base, BaseModel):
3705 3705 __tablename__ = 'users_group_to_perm'
3706 3706 __table_args__ = (
3707 3707 UniqueConstraint('users_group_id', 'permission_id',),
3708 3708 base_table_args
3709 3709 )
3710 3710
3711 3711 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3712 3712 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3713 3713 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3714 3714
3715 3715 users_group = relationship('UserGroup', back_populates='users_group_to_perm')
3716 3716 permission = relationship('Permission')
3717 3717
3718 3718
3719 3719 class UserRepoGroupToPerm(Base, BaseModel):
3720 3720 __tablename__ = 'user_repo_group_to_perm'
3721 3721 __table_args__ = (
3722 3722 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3723 3723 base_table_args
3724 3724 )
3725 3725
3726 3726 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3727 3727 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3728 3728 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3729 3729 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3730 3730
3731 3731 user = relationship('User', back_populates='repo_group_to_perm')
3732 3732 group = relationship('RepoGroup', back_populates='repo_group_to_perm')
3733 3733 permission = relationship('Permission')
3734 3734
3735 3735 @classmethod
3736 3736 def create(cls, user, repository_group, permission):
3737 3737 n = cls()
3738 3738 n.user = user
3739 3739 n.group = repository_group
3740 3740 n.permission = permission
3741 3741 Session().add(n)
3742 3742 return n
3743 3743
3744 3744
3745 3745 class UserGroupRepoGroupToPerm(Base, BaseModel):
3746 3746 __tablename__ = 'users_group_repo_group_to_perm'
3747 3747 __table_args__ = (
3748 3748 UniqueConstraint('users_group_id', 'group_id'),
3749 3749 base_table_args
3750 3750 )
3751 3751
3752 3752 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3753 3753 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3754 3754 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3755 3755 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3756 3756
3757 3757 users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3758 3758 permission = relationship('Permission')
3759 3759 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3760 3760
3761 3761 @classmethod
3762 3762 def create(cls, user_group, repository_group, permission):
3763 3763 n = cls()
3764 3764 n.users_group = user_group
3765 3765 n.group = repository_group
3766 3766 n.permission = permission
3767 3767 Session().add(n)
3768 3768 return n
3769 3769
3770 3770 def __repr__(self):
3771 3771 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3772 3772
3773 3773
3774 3774 class Statistics(Base, BaseModel):
3775 3775 __tablename__ = 'statistics'
3776 3776 __table_args__ = (
3777 3777 base_table_args
3778 3778 )
3779 3779
3780 3780 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3781 3781 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3782 3782 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3783 3783 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False) #JSON data
3784 3784 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False) #JSON data
3785 3785 languages = Column("languages", LargeBinary(1000000), nullable=False) #JSON data
3786 3786
3787 3787 repository = relationship('Repository', single_parent=True, viewonly=True)
3788 3788
3789 3789
3790 3790 class UserFollowing(Base, BaseModel):
3791 3791 __tablename__ = 'user_followings'
3792 3792 __table_args__ = (
3793 3793 UniqueConstraint('user_id', 'follows_repository_id'),
3794 3794 UniqueConstraint('user_id', 'follows_user_id'),
3795 3795 base_table_args
3796 3796 )
3797 3797
3798 3798 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3799 3799 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3800 3800 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3801 3801 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3802 3802 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3803 3803
3804 3804 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')
3805 3805
3806 3806 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3807 3807 follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')
3808 3808
3809 3809 @classmethod
3810 3810 def get_repo_followers(cls, repo_id):
3811 3811 return cls.query().filter(cls.follows_repo_id == repo_id)
3812 3812
3813 3813
3814 3814 class CacheKey(Base, BaseModel):
3815 3815 __tablename__ = 'cache_invalidation'
3816 3816 __table_args__ = (
3817 3817 UniqueConstraint('cache_key'),
3818 3818 Index('key_idx', 'cache_key'),
3819 3819 Index('cache_args_idx', 'cache_args'),
3820 3820 base_table_args,
3821 3821 )
3822 3822
3823 3823 CACHE_TYPE_FEED = 'FEED'
3824 3824
3825 3825 # namespaces used to register process/thread aware caches
3826 3826 REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'
3827 3827
3828 3828 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3829 3829 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3830 3830 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3831 3831 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3832 3832 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3833 3833
3834 3834 def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
3835 3835 self.cache_key = cache_key
3836 3836 self.cache_args = cache_args
3837 3837 self.cache_active = cache_active
3838 3838 # first key should be same for all entries, since all workers should share it
3839 3839 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3840 3840
3841 3841 def __repr__(self):
3842 3842 return "<%s('%s:%s[%s]')>" % (
3843 3843 self.cls_name,
3844 3844 self.cache_id, self.cache_key, self.cache_active)
3845 3845
3846 3846 def _cache_key_partition(self):
3847 3847 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3848 3848 return prefix, repo_name, suffix
3849 3849
3850 3850 def get_prefix(self):
3851 3851 """
3852 3852 Try to extract prefix from existing cache key. The key could consist
3853 3853 of prefix, repo_name, suffix
3854 3854 """
3855 3855 # this returns prefix, repo_name, suffix
3856 3856 return self._cache_key_partition()[0]
3857 3857
3858 3858 def get_suffix(self):
3859 3859 """
3860 3860 get suffix that might have been used in _get_cache_key to
3861 3861 generate self.cache_key. Only used for informational purposes
3862 3862 in repo_edit.mako.
3863 3863 """
3864 3864 # prefix, repo_name, suffix
3865 3865 return self._cache_key_partition()[2]
3866 3866
3867 3867 @classmethod
3868 3868 def generate_new_state_uid(cls, based_on=None):
3869 3869 if based_on:
3870 3870 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3871 3871 else:
3872 3872 return str(uuid.uuid4())
3873 3873
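# Illustrative behaviour of the classmethod above (values are hypothetical):
# passing based_on yields a deterministic UUID5, so every worker derives the
# same state uid from the same input, while omitting it yields a fresh random
# UUID4 on every call, e.g.
#   CacheKey.generate_new_state_uid(based_on='repo-42')  # stable across calls
#   CacheKey.generate_new_state_uid()                    # different each call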
3874 3874 @classmethod
3875 3875 def delete_all_cache(cls):
3876 3876 """
3877 3877 Delete all cache keys from the database.
3878 3878 Should only be run when all instances are down and all entries
3879 3879 are thus stale.
3880 3880 """
3881 3881 cls.query().delete()
3882 3882 Session().commit()
3883 3883
3884 3884 @classmethod
3885 3885 def set_invalidate(cls, cache_uid, delete=False):
3886 3886 """
3887 3887 Mark all caches of a repo as invalid in the database.
3888 3888 """
3889 3889 try:
3890 3890 qry = Session().query(cls).filter(cls.cache_key == cache_uid)
3891 3891 if delete:
3892 3892 qry.delete()
3893 3893 log.debug('cache objects deleted for cache args %s',
3894 3894 safe_str(cache_uid))
3895 3895 else:
3896 3896 new_uid = cls.generate_new_state_uid()
3897 3897 qry.update({"cache_state_uid": new_uid,
3898 3898 "cache_args": f"repo_state:{time.time()}"})
3899 3899 log.debug('cache object %s set new UID %s',
3900 3900 safe_str(cache_uid), new_uid)
3901 3901
3902 3902 Session().commit()
3903 3903 except Exception:
3904 3904 log.exception(
3905 3905 'Cache key invalidation failed for cache args %s',
3906 3906 safe_str(cache_uid))
3907 3907 Session().rollback()
3908 3908
3909 3909 @classmethod
3910 3910 def get_active_cache(cls, cache_key):
3911 3911 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3912 3912 if inv_obj:
3913 3913 return inv_obj
3914 3914 return None
3915 3915
3916 3916 @classmethod
3917 3917 def get_namespace_map(cls, namespace):
3918 3918 return {
3919 3919 x.cache_key: x
3920 3920 for x in cls.query().filter(cls.cache_args == namespace)}
3921 3921
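# A minimal usage sketch (hypothetical helper; assumes the cache key of the
# entry to invalidate is known). It shows the two modes of
# CacheKey.set_invalidate() defined above: rotating the state uid versus
# deleting the row entirely.
def _cache_invalidation_sketch(cache_key, drop=False):
    # either rotate the state uid (default) or drop the row entirely
    CacheKey.set_invalidate(cache_key, delete=drop)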
3922 3922
3923 3923 class ChangesetComment(Base, BaseModel):
3924 3924 __tablename__ = 'changeset_comments'
3925 3925 __table_args__ = (
3926 3926 Index('cc_revision_idx', 'revision'),
3927 3927 base_table_args,
3928 3928 )
3929 3929
3930 3930 COMMENT_OUTDATED = 'comment_outdated'
3931 3931 COMMENT_TYPE_NOTE = 'note'
3932 3932 COMMENT_TYPE_TODO = 'todo'
3933 3933 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3934 3934
3935 3935 OP_IMMUTABLE = 'immutable'
3936 3936 OP_CHANGEABLE = 'changeable'
3937 3937
3938 3938 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3939 3939 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3940 3940 revision = Column('revision', String(40), nullable=True)
3941 3941 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3942 3942 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3943 3943 line_no = Column('line_no', Unicode(10), nullable=True)
3944 3944 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3945 3945 f_path = Column('f_path', Unicode(1000), nullable=True)
3946 3946 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3947 3947 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3948 3948 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3949 3949 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3950 3950 renderer = Column('renderer', Unicode(64), nullable=True)
3951 3951 display_state = Column('display_state', Unicode(128), nullable=True)
3952 3952 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3953 3953 draft = Column('draft', Boolean(), nullable=True, default=False)
3954 3954
3955 3955 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3956 3956 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3957 3957
3958 3958 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3959 3959 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3960 3960
3961 3961 author = relationship('User', lazy='select', back_populates='user_comments')
3962 3962 repo = relationship('Repository', back_populates='comments')
3963 3963 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
3964 3964 pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
3965 3965 pull_request_version = relationship('PullRequestVersion', lazy='select')
3966 3966 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
3967 3967
3968 3968 @classmethod
3969 3969 def get_users(cls, revision=None, pull_request_id=None):
3970 3970 """
3971 3971 Returns users associated with this ChangesetComment, i.e. those
3972 3972 who actually commented.
3973 3973 
3974 3974 :param revision:
3975 3975 :param pull_request_id:
3976 3976 """
3977 3977 q = Session().query(User).join(ChangesetComment.author)
3978 3978 if revision:
3979 3979 q = q.filter(cls.revision == revision)
3980 3980 elif pull_request_id:
3981 3981 q = q.filter(cls.pull_request_id == pull_request_id)
3982 3982 return q.all()
3983 3983
3984 3984 @classmethod
3985 3985 def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
3986 3986 if pr_version is None:
3987 3987 return 0
3988 3988
3989 3989 if versions is not None:
3990 3990 num_versions = [x.pull_request_version_id for x in versions]
3991 3991
3992 3992 num_versions = num_versions or []
3993 3993 try:
3994 3994 return num_versions.index(pr_version) + 1
3995 3995 except (IndexError, ValueError):
3996 3996 return 0
3997 3997
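# Illustrative values (hypothetical): with versions whose ids are [10, 11, 12],
# get_index_from_version(11, num_versions=[10, 11, 12]) returns 2 (1-based index),
# while an unknown id or pr_version=None returns 0.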
3998 3998 @property
3999 3999 def outdated(self):
4000 4000 return self.display_state == self.COMMENT_OUTDATED
4001 4001
4002 4002 @property
4003 4003 def outdated_js(self):
4004 4004 return str_json(self.display_state == self.COMMENT_OUTDATED)
4005 4005
4006 4006 @property
4007 4007 def immutable(self):
4008 4008 return self.immutable_state == self.OP_IMMUTABLE
4009 4009
4010 4010 def outdated_at_version(self, version: int) -> bool:
4011 4011 """
4012 4012 Checks if the comment is outdated for the given pull request version
4013 4013 """
4014 4014
4015 4015 def version_check():
4016 4016 return self.pull_request_version_id and self.pull_request_version_id != version
4017 4017
4018 4018 if self.is_inline:
4019 4019 return self.outdated and version_check()
4020 4020 else:
4021 4021 # general comments don't have .outdated set; also the latest ones don't have a version
4022 4022 return version_check()
4023 4023
4024 4024 def outdated_at_version_js(self, version):
4025 4025 """
4026 4026 Checks if the comment is outdated for the given pull request version (JSON-encoded result)
4027 4027 """
4028 4028 return str_json(self.outdated_at_version(version))
4029 4029
4030 4030 def older_than_version(self, version: int) -> bool:
4031 4031 """
4032 4032 Checks if the comment was made on an earlier version than the given one.
4033 4033 Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
4034 4034 """
4035 4035
4036 4036 # If version is None, return False as the current version cannot be less than None
4037 4037 if version is None:
4038 4038 return False
4039 4039
4040 4040 # Ensure that the version is an integer to prevent TypeError on comparison
4041 4041 if not isinstance(version, int):
4042 4042 raise ValueError("The provided version must be an integer.")
4043 4043
4044 4044 # Initialize current version to 0 or pull_request_version_id if it's available
4045 4045 cur_ver = 0
4046 4046 if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
4047 4047 cur_ver = self.pull_request_version.pull_request_version_id
4048 4048
4049 4049 # Return True if the current version is less than the given version
4050 4050 return cur_ver < version
4051 4051
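# Illustrative values (hypothetical): for a comment whose pull_request_version
# has pull_request_version_id == 3, older_than_version(5) is True and
# older_than_version(3) is False; version=None always returns False, and a
# non-integer version raises ValueError.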
4052 4052 def older_than_version_js(self, version):
4053 4053 """
4054 4054 Checks if the comment was made on an earlier version than the given one (JSON-encoded result)
4055 4055 """
4056 4056 return str_json(self.older_than_version(version))
4057 4057
4058 4058 @property
4059 4059 def commit_id(self):
4060 4060 """New style naming to stop using .revision"""
4061 4061 return self.revision
4062 4062
4063 4063 @property
4064 4064 def resolved(self):
4065 4065 return self.resolved_by[0] if self.resolved_by else None
4066 4066
4067 4067 @property
4068 4068 def is_todo(self):
4069 4069 return self.comment_type == self.COMMENT_TYPE_TODO
4070 4070
4071 4071 @property
4072 4072 def is_inline(self):
4073 4073 if self.line_no and self.f_path:
4074 4074 return True
4075 4075 return False
4076 4076
4077 4077 @property
4078 4078 def last_version(self):
4079 4079 version = 0
4080 4080 if self.history:
4081 4081 version = self.history[-1].version
4082 4082 return version
4083 4083
4084 4084 def get_index_version(self, versions):
4085 4085 return self.get_index_from_version(
4086 4086 self.pull_request_version_id, versions)
4087 4087
4088 4088 @property
4089 4089 def review_status(self):
4090 4090 if self.status_change:
4091 4091 return self.status_change[0].status
4092 4092
4093 4093 @property
4094 4094 def review_status_lbl(self):
4095 4095 if self.status_change:
4096 4096 return self.status_change[0].status_lbl
4097 4097
4098 4098 def __repr__(self):
4099 4099 if self.comment_id:
4100 4100 return f'<DB:Comment #{self.comment_id}>'
4101 4101 else:
4102 4102 return f'<DB:Comment at {id(self)!r}>'
4103 4103
4104 4104 def get_api_data(self):
4105 4105 comment = self
4106 4106
4107 4107 data = {
4108 4108 'comment_id': comment.comment_id,
4109 4109 'comment_type': comment.comment_type,
4110 4110 'comment_text': comment.text,
4111 4111 'comment_status': comment.status_change,
4112 4112 'comment_f_path': comment.f_path,
4113 4113 'comment_lineno': comment.line_no,
4114 4114 'comment_author': comment.author,
4115 4115 'comment_created_on': comment.created_on,
4116 4116 'comment_resolved_by': self.resolved,
4117 4117 'comment_commit_id': comment.revision,
4118 4118 'comment_pull_request_id': comment.pull_request_id,
4119 4119 'comment_last_version': self.last_version
4120 4120 }
4121 4121 return data
4122 4122
4123 4123 def __json__(self):
4124 4124 data = dict()
4125 4125 data.update(self.get_api_data())
4126 4126 return data
4127 4127
4128 4128
4129 4129 class ChangesetCommentHistory(Base, BaseModel):
4130 4130 __tablename__ = 'changeset_comments_history'
4131 4131 __table_args__ = (
4132 4132 Index('cch_comment_id_idx', 'comment_id'),
4133 4133 base_table_args,
4134 4134 )
4135 4135
4136 4136 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
4137 4137 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
4138 4138 version = Column("version", Integer(), nullable=False, default=0)
4139 4139 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
4140 4140 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
4141 4141 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4142 4142 deleted = Column('deleted', Boolean(), default=False)
4143 4143
4144 4144 author = relationship('User', lazy='joined')
4145 4145 comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")
4146 4146
4147 4147 @classmethod
4148 4148 def get_version(cls, comment_id):
4149 4149 q = Session().query(ChangesetCommentHistory).filter(
4150 4150 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
4151 4151 if q.count() == 0:
4152 4152 return 1
4153 4153 elif q.count() >= q[0].version:
4154 4154 return q.count() + 1
4155 4155 else:
4156 4156 return q[0].version + 1
4157 4157
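# A small sketch of the version bookkeeping in get_version() above (values are
# hypothetical): with no history rows the next version is 1; with stored
# versions [1, 2, 3] the row count (3) equals the highest version, so the next
# edit of the comment is stored as version 4.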
4158 4158
4159 4159 class ChangesetStatus(Base, BaseModel):
4160 4160 __tablename__ = 'changeset_statuses'
4161 4161 __table_args__ = (
4162 4162 Index('cs_revision_idx', 'revision'),
4163 4163 Index('cs_version_idx', 'version'),
4164 4164 UniqueConstraint('repo_id', 'revision', 'version'),
4165 4165 base_table_args
4166 4166 )
4167 4167
4168 4168 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
4169 4169 STATUS_APPROVED = 'approved'
4170 4170 STATUS_REJECTED = 'rejected'
4171 4171 STATUS_UNDER_REVIEW = 'under_review'
4172 4172
4173 4173 STATUSES = [
4174 4174 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
4175 4175 (STATUS_APPROVED, _("Approved")),
4176 4176 (STATUS_REJECTED, _("Rejected")),
4177 4177 (STATUS_UNDER_REVIEW, _("Under Review")),
4178 4178 ]
4179 4179
4180 4180 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4181 4181 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4182 4182 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4183 4183 revision = Column('revision', String(40), nullable=False)
4184 4184 status = Column('status', String(128), nullable=False, default=DEFAULT)
4185 4185 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4186 4186 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4187 4187 version = Column('version', Integer(), nullable=False, default=0)
4188 4188 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4189 4189
4190 4190 author = relationship('User', lazy='select')
4191 4191 repo = relationship('Repository', lazy='select')
4192 4192 comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
4193 4193 pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')
4194 4194
4195 4195 def __repr__(self):
4196 4196 return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"
4197 4197
4198 4198 @classmethod
4199 4199 def get_status_lbl(cls, value):
4200 4200 return dict(cls.STATUSES).get(value)
4201 4201
4202 4202 @property
4203 4203 def status_lbl(self):
4204 4204 return ChangesetStatus.get_status_lbl(self.status)
4205 4205
4206 4206 def get_api_data(self):
4207 4207 status = self
4208 4208 data = {
4209 4209 'status_id': status.changeset_status_id,
4210 4210 'status': status.status,
4211 4211 }
4212 4212 return data
4213 4213
4214 4214 def __json__(self):
4215 4215 data = dict()
4216 4216 data.update(self.get_api_data())
4217 4217 return data
4218 4218
4219 4219
4220 4220 class _SetState(object):
4221 4221 """
4222 4222 Context manager allowing state changes for sensitive operations such as
4223 4223 pull request update or merge.
4224 4224 """
4225 4225
4226 4226 def __init__(self, pull_request, pr_state, back_state=None):
4227 4227 self._pr = pull_request
4228 4228 self._org_state = back_state or pull_request.pull_request_state
4229 4229 self._pr_state = pr_state
4230 4230 self._current_state = None
4231 4231
4232 4232 def __enter__(self):
4233 4233 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4234 4234 self._pr, self._pr_state)
4235 4235 self.set_pr_state(self._pr_state)
4236 4236 return self
4237 4237
4238 4238 def __exit__(self, exc_type, exc_val, exc_tb):
4239 4239 if exc_val is not None or exc_type is not None:
4240 4240 log.error(traceback.format_tb(exc_tb))
4241 4241 return None
4242 4242
4243 4243 self.set_pr_state(self._org_state)
4244 4244 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4245 4245 self._pr, self._org_state)
4246 4246
4247 4247 @property
4248 4248 def state(self):
4249 4249 return self._current_state
4250 4250
4251 4251 def set_pr_state(self, pr_state):
4252 4252 try:
4253 4253 self._pr.pull_request_state = pr_state
4254 4254 Session().add(self._pr)
4255 4255 Session().commit()
4256 4256 self._current_state = pr_state
4257 4257 except Exception:
4258 4258 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4259 4259 raise
4260 4260
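# A minimal usage sketch (hypothetical; `pull_request` is an existing PullRequest
# and `update_fn` a caller-supplied callable). _SetState flips the pull request
# into a transient state for the duration of a sensitive operation and restores
# the previous state (or an explicit back_state) on clean exit; on an exception
# the error is logged and the state is left as-is.
def _guarded_update_sketch(pull_request, update_fn):
    with _SetState(pull_request, PullRequest.STATE_UPDATING):
        # while inside the block the PR reports state 'updating'
        update_fn(pull_request)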
4261 4261
4262 4262 class _PullRequestBase(BaseModel):
4263 4263 """
4264 4264 Common attributes of pull request and version entries.
4265 4265 """
4266 4266
4267 4267 # .status values
4268 4268 STATUS_NEW = 'new'
4269 4269 STATUS_OPEN = 'open'
4270 4270 STATUS_CLOSED = 'closed'
4271 4271
4272 4272 # available states
4273 4273 STATE_CREATING = 'creating'
4274 4274 STATE_UPDATING = 'updating'
4275 4275 STATE_MERGING = 'merging'
4276 4276 STATE_CREATED = 'created'
4277 4277
4278 4278 title = Column('title', Unicode(255), nullable=True)
4279 4279 description = Column(
4280 4280 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4281 4281 nullable=True)
4282 4282 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4283 4283
4284 4284 # new/open/closed status of pull request (not approve/reject/etc)
4285 4285 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4286 4286 created_on = Column(
4287 4287 'created_on', DateTime(timezone=False), nullable=False,
4288 4288 default=datetime.datetime.now)
4289 4289 updated_on = Column(
4290 4290 'updated_on', DateTime(timezone=False), nullable=False,
4291 4291 default=datetime.datetime.now)
4292 4292
4293 4293 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4294 4294
4295 4295 @declared_attr
4296 4296 def user_id(cls):
4297 4297 return Column(
4298 4298 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4299 4299 unique=None)
4300 4300
4301 4301 # 500 revisions max
4302 4302 _revisions = Column(
4303 4303 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4304 4304
4305 4305 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4306 4306
4307 4307 @declared_attr
4308 4308 def source_repo_id(cls):
4309 4309 # TODO: dan: rename column to source_repo_id
4310 4310 return Column(
4311 4311 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4312 4312 nullable=False)
4313 4313
4314 4314 @declared_attr
4315 4315 def pr_source(cls):
4316 4316 return relationship(
4317 4317 'Repository',
4318 4318 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4319 4319 overlaps="pull_requests_source"
4320 4320 )
4321 4321
4322 4322 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4323 4323
4324 4324 @hybrid_property
4325 4325 def source_ref(self):
4326 4326 return self._source_ref
4327 4327
4328 4328 @source_ref.setter
4329 4329 def source_ref(self, val):
4330 4330 parts = (val or '').split(':')
4331 4331 if len(parts) != 3:
4332 4332 raise ValueError(
4333 4333 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4334 4334 self._source_ref = safe_str(val)
4335 4335
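# Illustrative reference format (assumption on the concrete values): both
# source_ref and target_ref are stored as 'type:name:commit_id' strings, e.g.
# 'branch:default:0123abcd...'; anything that does not split into exactly three
# colon-separated parts is rejected by the setter above with a ValueError.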
4336 4336 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4337 4337
4338 4338 @hybrid_property
4339 4339 def target_ref(self):
4340 4340 return self._target_ref
4341 4341
4342 4342 @target_ref.setter
4343 4343 def target_ref(self, val):
4344 4344 parts = (val or '').split(':')
4345 4345 if len(parts) != 3:
4346 4346 raise ValueError(
4347 4347 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4348 4348 self._target_ref = safe_str(val)
4349 4349
4350 4350 @declared_attr
4351 4351 def target_repo_id(cls):
4352 4352 # TODO: dan: rename column to target_repo_id
4353 4353 return Column(
4354 4354 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4355 4355 nullable=False)
4356 4356
4357 4357 @declared_attr
4358 4358 def pr_target(cls):
4359 4359 return relationship(
4360 4360 'Repository',
4361 4361 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4362 4362 overlaps="pull_requests_target"
4363 4363 )
4364 4364
4365 4365 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4366 4366
4367 4367 # TODO: dan: rename column to last_merge_source_rev
4368 4368 _last_merge_source_rev = Column(
4369 4369 'last_merge_org_rev', String(40), nullable=True)
4370 4370 # TODO: dan: rename column to last_merge_target_rev
4371 4371 _last_merge_target_rev = Column(
4372 4372 'last_merge_other_rev', String(40), nullable=True)
4373 4373 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4374 4374 last_merge_metadata = Column(
4375 4375 'last_merge_metadata', MutationObj.as_mutable(
4376 4376 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4377 4377
4378 4378 merge_rev = Column('merge_rev', String(40), nullable=True)
4379 4379
4380 4380 reviewer_data = Column(
4381 4381 'reviewer_data_json', MutationObj.as_mutable(
4382 4382 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4383 4383
4384 4384 @property
4385 4385 def reviewer_data_json(self):
4386 4386 return str_json(self.reviewer_data)
4387 4387
4388 4388 @property
4389 4389 def last_merge_metadata_parsed(self):
4390 4390 metadata = {}
4391 4391 if not self.last_merge_metadata:
4392 4392 return metadata
4393 4393
4394 4394 if hasattr(self.last_merge_metadata, 'de_coerce'):
4395 4395 for k, v in self.last_merge_metadata.de_coerce().items():
4396 4396 if k in ['target_ref', 'source_ref']:
4397 4397 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4398 4398 else:
4399 4399 if hasattr(v, 'de_coerce'):
4400 4400 metadata[k] = v.de_coerce()
4401 4401 else:
4402 4402 metadata[k] = v
4403 4403 return metadata
4404 4404
4405 4405 @property
4406 4406 def work_in_progress(self):
4407 4407 """checks if pull request is work in progress by checking the title"""
4408 4408 title = self.title.upper()
4409 4409 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4410 4410 return True
4411 4411 return False
4412 4412
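# Illustrative titles for the check above (hypothetical examples): '[WIP] add
# feature', 'WIP: fix bug' and 'WIP cleanup' are treated as work in progress,
# while 'Fix WIP handling' is not, because the marker must start the title.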
4413 4413 @property
4414 4414 def title_safe(self):
4415 4415 return self.title\
4416 4416 .replace('{', '{{')\
4417 4417 .replace('}', '}}')
4418 4418
4419 4419 @hybrid_property
4420 4420 def description_safe(self):
4421 4421 from rhodecode.lib import helpers as h
4422 4422 return h.escape(self.description)
4423 4423
4424 4424 @hybrid_property
4425 4425 def revisions(self):
4426 4426 return self._revisions.split(':') if self._revisions else []
4427 4427
4428 4428 @revisions.setter
4429 4429 def revisions(self, val):
4430 4430 self._revisions = ':'.join(val)
4431 4431
4432 4432 @hybrid_property
4433 4433 def last_merge_status(self):
4434 4434 return safe_int(self._last_merge_status)
4435 4435
4436 4436 @last_merge_status.setter
4437 4437 def last_merge_status(self, val):
4438 4438 self._last_merge_status = val
4439 4439
4440 4440 @declared_attr
4441 4441 def author(cls):
4442 4442 return relationship(
4443 4443 'User', lazy='joined',
4444 4444 #TODO, problem that is somehow :?
4445 4445 #back_populates='user_pull_requests'
4446 4446 )
4447 4447
4448 4448 @declared_attr
4449 4449 def source_repo(cls):
4450 4450 return relationship(
4451 4451 'Repository',
4452 4452 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4453 4453 overlaps="pr_source"
4454 4454 )
4455 4455
4456 4456 @property
4457 4457 def source_ref_parts(self):
4458 4458 return self.unicode_to_reference(self.source_ref)
4459 4459
4460 4460 @declared_attr
4461 4461 def target_repo(cls):
4462 4462 return relationship(
4463 4463 'Repository',
4464 4464 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4465 4465 overlaps="pr_target"
4466 4466 )
4467 4467
4468 4468 @property
4469 4469 def target_ref_parts(self):
4470 4470 return self.unicode_to_reference(self.target_ref)
4471 4471
4472 4472 @property
4473 4473 def shadow_merge_ref(self):
4474 4474 return self.unicode_to_reference(self._shadow_merge_ref)
4475 4475
4476 4476 @shadow_merge_ref.setter
4477 4477 def shadow_merge_ref(self, ref):
4478 4478 self._shadow_merge_ref = self.reference_to_unicode(ref)
4479 4479
4480 4480 @staticmethod
4481 4481 def unicode_to_reference(raw):
4482 4482 return unicode_to_reference(raw)
4483 4483
4484 4484 @staticmethod
4485 4485 def reference_to_unicode(ref):
4486 4486 return reference_to_unicode(ref)
4487 4487
4488 4488 def get_api_data(self, with_merge_state=True):
4489 4489 from rhodecode.model.pull_request import PullRequestModel
4490 4490
4491 4491 pull_request = self
4492 4492 if with_merge_state:
4493 4493 merge_response, merge_status, msg = \
4494 4494 PullRequestModel().merge_status(pull_request)
4495 4495 merge_state = {
4496 4496 'status': merge_status,
4497 4497 'message': safe_str(msg),
4498 4498 }
4499 4499 else:
4500 4500 merge_state = {'status': 'not_available',
4501 4501 'message': 'not_available'}
4502 4502
4503 4503 merge_data = {
4504 4504 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4505 4505 'reference': (
4506 4506 pull_request.shadow_merge_ref.asdict()
4507 4507 if pull_request.shadow_merge_ref else None),
4508 4508 }
4509 4509
4510 4510 data = {
4511 4511 'pull_request_id': pull_request.pull_request_id,
4512 4512 'url': PullRequestModel().get_url(pull_request),
4513 4513 'title': pull_request.title,
4514 4514 'description': pull_request.description,
4515 4515 'status': pull_request.status,
4516 4516 'state': pull_request.pull_request_state,
4517 4517 'created_on': pull_request.created_on,
4518 4518 'updated_on': pull_request.updated_on,
4519 4519 'commit_ids': pull_request.revisions,
4520 4520 'review_status': pull_request.calculated_review_status(),
4521 4521 'mergeable': merge_state,
4522 4522 'source': {
4523 4523 'clone_url': pull_request.source_repo.clone_url(),
4524 4524 'repository': pull_request.source_repo.repo_name,
4525 4525 'reference': {
4526 4526 'name': pull_request.source_ref_parts.name,
4527 4527 'type': pull_request.source_ref_parts.type,
4528 4528 'commit_id': pull_request.source_ref_parts.commit_id,
4529 4529 },
4530 4530 },
4531 4531 'target': {
4532 4532 'clone_url': pull_request.target_repo.clone_url(),
4533 4533 'repository': pull_request.target_repo.repo_name,
4534 4534 'reference': {
4535 4535 'name': pull_request.target_ref_parts.name,
4536 4536 'type': pull_request.target_ref_parts.type,
4537 4537 'commit_id': pull_request.target_ref_parts.commit_id,
4538 4538 },
4539 4539 },
4540 4540 'merge': merge_data,
4541 4541 'author': pull_request.author.get_api_data(include_secrets=False,
4542 4542 details='basic'),
4543 4543 'reviewers': [
4544 4544 {
4545 4545 'user': reviewer.get_api_data(include_secrets=False,
4546 4546 details='basic'),
4547 4547 'reasons': reasons,
4548 4548 'review_status': st[0][1].status if st else 'not_reviewed',
4549 4549 }
4550 4550 for obj, reviewer, reasons, mandatory, st in
4551 4551 pull_request.reviewers_statuses()
4552 4552 ]
4553 4553 }
4554 4554
4555 4555 return data
4556 4556
4557 4557 def set_state(self, pull_request_state, final_state=None):
4558 4558 """
4559 4559 # goes from the initial state to the given state and back to the initial state.
4560 4560 # the state restored on exit can be changed by specifying final_state=
4561 4561 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4562 4562 pull_request_obj.merge()
4563 4563
4564 4564 :param pull_request_state:
4565 4565 :param final_state:
4566 4566
4567 4567 """
4568 4568
4569 4569 return _SetState(self, pull_request_state, back_state=final_state)
4570 4570
4571 4571
4572 4572 class PullRequest(Base, _PullRequestBase):
4573 4573 __tablename__ = 'pull_requests'
4574 4574 __table_args__ = (
4575 4575 base_table_args,
4576 4576 )
4577 4577 LATEST_VER = 'latest'
4578 4578
4579 4579 pull_request_id = Column(
4580 4580 'pull_request_id', Integer(), nullable=False, primary_key=True)
4581 4581
4582 4582 def __repr__(self):
4583 4583 if self.pull_request_id:
4584 4584 return f'<DB:PullRequest #{self.pull_request_id}>'
4585 4585 else:
4586 4586 return f'<DB:PullRequest at {id(self)!r}>'
4587 4587
4588 def __str__(self):
4589 if self.pull_request_id:
4590 return f'#{self.pull_request_id}'
4591 else:
4592 return f'#{id(self)!r}'
4593
4588 4594 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4589 4595 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4590 4596 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4591 4597 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4592 4598
4593 4599 @classmethod
4594 4600 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4595 4601 internal_methods=None):
4596 4602
4597 4603 class PullRequestDisplay(object):
4598 4604 """
4599 4605 Special object wrapper for showing PullRequest data via Versions.
4600 4606 It mimics the PR object as closely as possible. This is a read-only
4601 4607 object used only for display.
4602 4608 """
4603 4609
4604 4610 def __init__(self, attrs, internal=None):
4605 4611 self.attrs = attrs
4606 4612 # internal attributes have priority over the ones given via attrs
4607 4613 self.internal = internal or ['versions']
4608 4614
4609 4615 def __getattr__(self, item):
4610 4616 if item in self.internal:
4611 4617 return getattr(self, item)
4612 4618 try:
4613 4619 return self.attrs[item]
4614 4620 except KeyError:
4615 4621 raise AttributeError(
4616 4622 '%s object has no attribute %s' % (self, item))
4617 4623
4618 4624 def __repr__(self):
4619 4625 pr_id = self.attrs.get('pull_request_id')
4620 4626 return f'<DB:PullRequestDisplay #{pr_id}>'
4621 4627
4622 4628 def versions(self):
4623 4629 return pull_request_obj.versions.order_by(
4624 4630 PullRequestVersion.pull_request_version_id).all()
4625 4631
4626 4632 def is_closed(self):
4627 4633 return pull_request_obj.is_closed()
4628 4634
4629 4635 def is_state_changing(self):
4630 4636 return pull_request_obj.is_state_changing()
4631 4637
4632 4638 @property
4633 4639 def pull_request_version_id(self):
4634 4640 return getattr(pull_request_obj, 'pull_request_version_id', None)
4635 4641
4636 4642 @property
4637 4643 def pull_request_last_version(self):
4638 4644 return pull_request_obj.pull_request_last_version
4639 4645
4640 4646 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4641 4647
4642 4648 attrs.author = StrictAttributeDict(
4643 4649 pull_request_obj.author.get_api_data())
4644 4650 if pull_request_obj.target_repo:
4645 4651 attrs.target_repo = StrictAttributeDict(
4646 4652 pull_request_obj.target_repo.get_api_data())
4647 4653 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4648 4654
4649 4655 if pull_request_obj.source_repo:
4650 4656 attrs.source_repo = StrictAttributeDict(
4651 4657 pull_request_obj.source_repo.get_api_data())
4652 4658 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4653 4659
4654 4660 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4655 4661 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4656 4662 attrs.revisions = pull_request_obj.revisions
4657 4663 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4658 4664 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4659 4665 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4660 4666 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4661 4667
4662 4668 return PullRequestDisplay(attrs, internal=internal_methods)
4663 4669
4664 4670 def is_closed(self):
4665 4671 return self.status == self.STATUS_CLOSED
4666 4672
4667 4673 def is_state_changing(self):
4668 4674 return self.pull_request_state != PullRequest.STATE_CREATED
4669 4675
4670 4676 def __json__(self):
4671 4677 return {
4672 4678 'revisions': self.revisions,
4673 4679 'versions': self.versions_count
4674 4680 }
4675 4681
4676 4682 def calculated_review_status(self):
4677 4683 from rhodecode.model.changeset_status import ChangesetStatusModel
4678 4684 return ChangesetStatusModel().calculated_review_status(self)
4679 4685
4680 4686 def reviewers_statuses(self, user=None):
4681 4687 from rhodecode.model.changeset_status import ChangesetStatusModel
4682 4688 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4683 4689
4684 4690 def get_pull_request_reviewers(self, role=None):
4685 4691 qry = PullRequestReviewers.query()\
4686 4692 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4687 4693 if role:
4688 4694 qry = qry.filter(PullRequestReviewers.role == role)
4689 4695
4690 4696 return qry.all()
4691 4697
4692 4698 @property
4693 4699 def reviewers_count(self):
4694 4700 qry = PullRequestReviewers.query()\
4695 4701 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4696 4702 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4697 4703 return qry.count()
4698 4704
4699 4705 @property
4700 4706 def observers_count(self):
4701 4707 qry = PullRequestReviewers.query()\
4702 4708 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4703 4709 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4704 4710 return qry.count()
4705 4711
4706 4712 def observers(self):
4707 4713 qry = PullRequestReviewers.query()\
4708 4714 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4709 4715 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4710 4716 .all()
4711 4717
4712 4718 for entry in qry:
4713 4719 yield entry, entry.user
4714 4720
4715 4721 @property
4716 4722 def workspace_id(self):
4717 4723 from rhodecode.model.pull_request import PullRequestModel
4718 4724 return PullRequestModel()._workspace_id(self)
4719 4725
4720 4726 def get_shadow_repo(self):
4721 4727 workspace_id = self.workspace_id
4722 4728 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4723 4729 if os.path.isdir(shadow_repository_path):
4724 4730 vcs_obj = self.target_repo.scm_instance()
4725 4731 return vcs_obj.get_shadow_instance(shadow_repository_path)
4726 4732
4727 4733 @property
4728 4734 def versions_count(self):
4729 4735 """
4730 4736 return the number of versions this PR has, e.g. a PR that has been
4731 4737 updated once will have 2 versions
4732 4738 """
4733 4739 return self.versions.count() + 1
4734 4740
4735 4741 @property
4736 4742 def pull_request_last_version(self):
4737 4743 return self.versions_count
4738 4744
4739 4745
4740 4746 class PullRequestVersion(Base, _PullRequestBase):
4741 4747 __tablename__ = 'pull_request_versions'
4742 4748 __table_args__ = (
4743 4749 base_table_args,
4744 4750 )
4745 4751
4746 4752 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4747 4753 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4748 4754 pull_request = relationship('PullRequest', back_populates='versions')
4749 4755
4750 4756 def __repr__(self):
4751 4757 if self.pull_request_version_id:
4752 4758 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4753 4759 else:
4754 4760 return f'<DB:PullRequestVersion at {id(self)!r}>'
4755 4761
4756 4762 @property
4757 4763 def reviewers(self):
4758 4764 return self.pull_request.reviewers
4759 4765
4760 4766 @property
4761 4767 def versions(self):
4762 4768 return self.pull_request.versions
4763 4769
4764 4770 def is_closed(self):
4765 4771 # calculate from original
4766 4772 return self.pull_request.status == self.STATUS_CLOSED
4767 4773
4768 4774 def is_state_changing(self):
4769 4775 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4770 4776
4771 4777 def calculated_review_status(self):
4772 4778 return self.pull_request.calculated_review_status()
4773 4779
4774 4780 def reviewers_statuses(self):
4775 4781 return self.pull_request.reviewers_statuses()
4776 4782
4777 4783 def observers(self):
4778 4784 return self.pull_request.observers()
4779 4785
4780 4786
4781 4787 class PullRequestReviewers(Base, BaseModel):
4782 4788 __tablename__ = 'pull_request_reviewers'
4783 4789 __table_args__ = (
4784 4790 base_table_args,
4785 4791 )
4786 4792 ROLE_REVIEWER = 'reviewer'
4787 4793 ROLE_OBSERVER = 'observer'
4788 4794 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4789 4795
4790 4796 @hybrid_property
4791 4797 def reasons(self):
4792 4798 if not self._reasons:
4793 4799 return []
4794 4800 return self._reasons
4795 4801
4796 4802 @reasons.setter
4797 4803 def reasons(self, val):
4798 4804 val = val or []
4799 4805 if any(not isinstance(x, str) for x in val):
4800 4806 raise Exception('invalid reasons type, must be list of strings')
4801 4807 self._reasons = val
4802 4808
4803 4809 pull_requests_reviewers_id = Column(
4804 4810 'pull_requests_reviewers_id', Integer(), nullable=False,
4805 4811 primary_key=True)
4806 4812 pull_request_id = Column(
4807 4813 "pull_request_id", Integer(),
4808 4814 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4809 4815 user_id = Column(
4810 4816 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4811 4817 _reasons = Column(
4812 4818 'reason', MutationList.as_mutable(
4813 4819 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4814 4820
4815 4821 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4816 4822 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4817 4823
4818 4824 user = relationship('User')
4819 4825 pull_request = relationship('PullRequest', back_populates='reviewers')
4820 4826
4821 4827 rule_data = Column(
4822 4828 'rule_data_json',
4823 4829 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4824 4830
4825 4831 def rule_user_group_data(self):
4826 4832 """
4827 4833 Returns the voting user group rule data for this reviewer
4828 4834 """
4829 4835
4830 4836 if self.rule_data and 'vote_rule' in self.rule_data:
4831 4837 user_group_data = {}
4832 4838 if 'rule_user_group_entry_id' in self.rule_data:
4833 4839 # means a group with voting rules !
4834 4840 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4835 4841 user_group_data['name'] = self.rule_data['rule_name']
4836 4842 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4837 4843
4838 4844 return user_group_data
4839 4845
4840 4846 @classmethod
4841 4847 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4842 4848 qry = PullRequestReviewers.query()\
4843 4849 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4844 4850 if role:
4845 4851 qry = qry.filter(PullRequestReviewers.role == role)
4846 4852
4847 4853 return qry.all()
4848 4854
4849 4855 def __repr__(self):
4850 4856 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4851 4857
4852 4858
4853 4859 class Notification(Base, BaseModel):
4854 4860 __tablename__ = 'notifications'
4855 4861 __table_args__ = (
4856 4862 Index('notification_type_idx', 'type'),
4857 4863 base_table_args,
4858 4864 )
4859 4865
4860 4866 TYPE_CHANGESET_COMMENT = 'cs_comment'
4861 4867 TYPE_MESSAGE = 'message'
4862 4868 TYPE_MENTION = 'mention'
4863 4869 TYPE_REGISTRATION = 'registration'
4864 4870 TYPE_PULL_REQUEST = 'pull_request'
4865 4871 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4866 4872 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4867 4873
4868 4874 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4869 4875 subject = Column('subject', Unicode(512), nullable=True)
4870 4876 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4871 4877 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4872 4878 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4873 4879 type_ = Column('type', Unicode(255))
4874 4880
4875 4881 created_by_user = relationship('User', back_populates='user_created_notifications')
4876 4882 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4877 4883
4878 4884 @property
4879 4885 def recipients(self):
4880 4886 return [x.user for x in UserNotification.query()\
4881 4887 .filter(UserNotification.notification == self)\
4882 4888 .order_by(UserNotification.user_id.asc()).all()]
4883 4889
4884 4890 @classmethod
4885 4891 def create(cls, created_by, subject, body, recipients, type_=None):
4886 4892 if type_ is None:
4887 4893 type_ = Notification.TYPE_MESSAGE
4888 4894
4889 4895 notification = cls()
4890 4896 notification.created_by_user = created_by
4891 4897 notification.subject = subject
4892 4898 notification.body = body
4893 4899 notification.type_ = type_
4894 4900 notification.created_on = datetime.datetime.now()
4895 4901
4896 4902 # For each recipient, link the created notification to their account
4897 4903 for u in recipients:
4898 4904 assoc = UserNotification()
4899 4905 assoc.user_id = u.user_id
4900 4906 assoc.notification = notification
4901 4907
4902 4908 # if created_by is among the recipients, mark their notification
4903 4909 # as read
4904 4910 if u.user_id == created_by.user_id:
4905 4911 assoc.read = True
4906 4912 Session().add(assoc)
4907 4913
4908 4914 Session().add(notification)
4909 4915
4910 4916 return notification
4911 4917
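# A minimal usage sketch (hypothetical: `admin` and `recipient` are User
# instances). Notification.create() links the notification to every recipient
# and marks the creator's own copy as read; committing the session is assumed
# to be the caller's responsibility.
def _send_message_sketch(admin, recipient):
    notification = Notification.create(
        created_by=admin,
        subject='Maintenance window',
        body='The server will be restarted tonight.',
        recipients=[admin, recipient],
        type_=Notification.TYPE_MESSAGE)
    Session().commit()
    return notification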
4912 4918
4913 4919 class UserNotification(Base, BaseModel):
4914 4920 __tablename__ = 'user_to_notification'
4915 4921 __table_args__ = (
4916 4922 UniqueConstraint('user_id', 'notification_id'),
4917 4923 base_table_args
4918 4924 )
4919 4925
4920 4926 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4921 4927 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4922 4928 read = Column('read', Boolean, default=False)
4923 4929 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4924 4930
4925 4931 user = relationship('User', lazy="joined", back_populates='notifications')
4926 4932 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4927 4933
4928 4934 def mark_as_read(self):
4929 4935 self.read = True
4930 4936 Session().add(self)
4931 4937
4932 4938
4933 4939 class UserNotice(Base, BaseModel):
4934 4940 __tablename__ = 'user_notices'
4935 4941 __table_args__ = (
4936 4942 base_table_args
4937 4943 )
4938 4944
4939 4945 NOTIFICATION_TYPE_MESSAGE = 'message'
4940 4946 NOTIFICATION_TYPE_NOTICE = 'notice'
4941 4947
4942 4948 NOTIFICATION_LEVEL_INFO = 'info'
4943 4949 NOTIFICATION_LEVEL_WARNING = 'warning'
4944 4950 NOTIFICATION_LEVEL_ERROR = 'error'
4945 4951
4946 4952 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4947 4953
4948 4954 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4949 4955 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4950 4956
4951 4957 notice_read = Column('notice_read', Boolean, default=False)
4952 4958
4953 4959 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4954 4960 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4955 4961
4956 4962 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4957 4963 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4958 4964
4959 4965 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4960 4966 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4961 4967
4962 4968 @classmethod
4963 4969 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4964 4970
4965 4971 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4966 4972 cls.NOTIFICATION_LEVEL_WARNING,
4967 4973 cls.NOTIFICATION_LEVEL_INFO]:
4968 4974 return
4969 4975
4970 4976 from rhodecode.model.user import UserModel
4971 4977 user = UserModel().get_user(user)
4972 4978
4973 4979 new_notice = UserNotice()
4974 4980 if not allow_duplicate:
4975 4981 existing_msg = UserNotice().query() \
4976 4982 .filter(UserNotice.user == user) \
4977 4983 .filter(UserNotice.notice_body == body) \
4978 4984 .filter(UserNotice.notice_read == false()) \
4979 4985 .scalar()
4980 4986 if existing_msg:
4981 4987 log.warning('Ignoring duplicate notice for user %s', user)
4982 4988 return
4983 4989
4984 4990 new_notice.user = user
4985 4991 new_notice.notice_subject = subject
4986 4992 new_notice.notice_body = body
4987 4993 new_notice.notification_level = notice_level
4988 4994 Session().add(new_notice)
4989 4995 Session().commit()
4990 4996
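# A minimal usage sketch (hypothetical values; the user argument is resolved via
# UserModel().get_user(), so a user id works as well). Unread notices with the
# same body are skipped unless allow_duplicate=True, and the method commits the
# session itself.
def _notify_user_sketch(user_id):
    UserNotice.create_for_user(
        user_id,
        'Storage quota',
        'You are close to your storage quota.',
        notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)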
4991 4997
4992 4998 class Gist(Base, BaseModel):
4993 4999 __tablename__ = 'gists'
4994 5000 __table_args__ = (
4995 5001 Index('g_gist_access_id_idx', 'gist_access_id'),
4996 5002 Index('g_created_on_idx', 'created_on'),
4997 5003 base_table_args
4998 5004 )
4999 5005
5000 5006 GIST_PUBLIC = 'public'
5001 5007 GIST_PRIVATE = 'private'
5002 5008 DEFAULT_FILENAME = 'gistfile1.txt'
5003 5009
5004 5010 ACL_LEVEL_PUBLIC = 'acl_public'
5005 5011 ACL_LEVEL_PRIVATE = 'acl_private'
5006 5012
5007 5013 gist_id = Column('gist_id', Integer(), primary_key=True)
5008 5014 gist_access_id = Column('gist_access_id', Unicode(250))
5009 5015 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
5010 5016 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
5011 5017 gist_expires = Column('gist_expires', Float(53), nullable=False)
5012 5018 gist_type = Column('gist_type', Unicode(128), nullable=False)
5013 5019 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5014 5020 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5015 5021 acl_level = Column('acl_level', Unicode(128), nullable=True)
5016 5022
5017 5023 owner = relationship('User', back_populates='user_gists')
5018 5024
5019 5025 def __repr__(self):
5020 5026 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
5021 5027
5022 5028 @hybrid_property
5023 5029 def description_safe(self):
5024 5030 from rhodecode.lib import helpers as h
5025 5031 return h.escape(self.gist_description)
5026 5032
5027 5033 @classmethod
5028 5034 def get_or_404(cls, id_):
5029 5035 from pyramid.httpexceptions import HTTPNotFound
5030 5036
5031 5037 res = cls.query().filter(cls.gist_access_id == id_).scalar()
5032 5038 if not res:
5033 5039 log.debug('WARN: No DB entry with id %s', id_)
5034 5040 raise HTTPNotFound()
5035 5041 return res
5036 5042
5037 5043 @classmethod
5038 5044 def get_by_access_id(cls, gist_access_id):
5039 5045 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
5040 5046
5041 5047 def gist_url(self):
5042 5048 from rhodecode.model.gist import GistModel
5043 5049 return GistModel().get_url(self)
5044 5050
5045 5051 @classmethod
5046 5052 def base_path(cls):
5047 5053 """
5048 5054 Returns base path when all gists are stored
5049 5055
5050 5056 :param cls:
5051 5057 """
5052 5058 from rhodecode.model.gist import GIST_STORE_LOC
5053 5059 from rhodecode.lib.utils import get_rhodecode_repo_store_path
5054 5060 repo_store_path = get_rhodecode_repo_store_path()
5055 5061 return os.path.join(repo_store_path, GIST_STORE_LOC)
5056 5062
5057 5063 def get_api_data(self):
5058 5064 """
5059 5065 Common function for generating gist related data for API
5060 5066 """
5061 5067 gist = self
5062 5068 data = {
5063 5069 'gist_id': gist.gist_id,
5064 5070 'type': gist.gist_type,
5065 5071 'access_id': gist.gist_access_id,
5066 5072 'description': gist.gist_description,
5067 5073 'url': gist.gist_url(),
5068 5074 'expires': gist.gist_expires,
5069 5075 'created_on': gist.created_on,
5070 5076 'modified_at': gist.modified_at,
5071 5077 'content': None,
5072 5078 'acl_level': gist.acl_level,
5073 5079 }
5074 5080 return data
5075 5081
5076 5082 def __json__(self):
5077 5083 data = dict()
5078 5084 data.update(self.get_api_data())
5079 5085 return data
5080 5086 # SCM functions
5081 5087
5082 5088 def scm_instance(self, **kwargs):
5083 5089 """
5084 5090 Get an instance of VCS Repository
5085 5091
5086 5092 :param kwargs:
5087 5093 """
5088 5094 from rhodecode.model.gist import GistModel
5089 5095 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
5090 5096 return get_vcs_instance(
5091 5097 repo_path=safe_str(full_repo_path), create=False,
5092 5098 _vcs_alias=GistModel.vcs_backend)
5093 5099
5094 5100
5095 5101 class ExternalIdentity(Base, BaseModel):
5096 5102 __tablename__ = 'external_identities'
5097 5103 __table_args__ = (
5098 5104 Index('local_user_id_idx', 'local_user_id'),
5099 5105 Index('external_id_idx', 'external_id'),
5100 5106 base_table_args
5101 5107 )
5102 5108
5103 5109 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
5104 5110 external_username = Column('external_username', Unicode(1024), default='')
5105 5111 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
5106 5112 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
5107 5113 access_token = Column('access_token', String(1024), default='')
5108 5114 alt_token = Column('alt_token', String(1024), default='')
5109 5115 token_secret = Column('token_secret', String(1024), default='')
5110 5116
5111 5117 @classmethod
5112 5118 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
5113 5119 """
5114 5120 Returns ExternalIdentity instance based on search params
5115 5121
5116 5122 :param external_id:
5117 5123 :param provider_name:
5118 5124 :return: ExternalIdentity
5119 5125 """
5120 5126 query = cls.query()
5121 5127 query = query.filter(cls.external_id == external_id)
5122 5128 query = query.filter(cls.provider_name == provider_name)
5123 5129 if local_user_id:
5124 5130 query = query.filter(cls.local_user_id == local_user_id)
5125 5131 return query.first()
5126 5132
5127 5133 @classmethod
5128 5134 def user_by_external_id_and_provider(cls, external_id, provider_name):
5129 5135 """
5130 5136 Returns User instance based on search params
5131 5137
5132 5138 :param external_id:
5133 5139 :param provider_name:
5134 5140 :return: User
5135 5141 """
5136 5142 query = User.query()
5137 5143 query = query.filter(cls.external_id == external_id)
5138 5144 query = query.filter(cls.provider_name == provider_name)
5139 5145 query = query.filter(User.user_id == cls.local_user_id)
5140 5146 return query.first()
5141 5147
5142 5148 @classmethod
5143 5149 def by_local_user_id(cls, local_user_id):
5144 5150 """
5145 5151 Returns all tokens for user
5146 5152
5147 5153 :param local_user_id:
5148 5154 :return: ExternalIdentity
5149 5155 """
5150 5156 query = cls.query()
5151 5157 query = query.filter(cls.local_user_id == local_user_id)
5152 5158 return query
5153 5159
5154 5160 @classmethod
5155 5161 def load_provider_plugin(cls, plugin_id):
5156 5162 from rhodecode.authentication.base import loadplugin
5157 5163 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5158 5164 auth_plugin = loadplugin(_plugin_id)
5159 5165 return auth_plugin
5160 5166
5161 5167
5162 5168 class Integration(Base, BaseModel):
5163 5169 __tablename__ = 'integrations'
5164 5170 __table_args__ = (
5165 5171 base_table_args
5166 5172 )
5167 5173
5168 5174 integration_id = Column('integration_id', Integer(), primary_key=True)
5169 5175 integration_type = Column('integration_type', String(255))
5170 5176 enabled = Column('enabled', Boolean(), nullable=False)
5171 5177 name = Column('name', String(255), nullable=False)
5172 5178 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5173 5179
5174 5180 settings = Column(
5175 5181 'settings_json', MutationObj.as_mutable(
5176 5182 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5177 5183 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5178 5184 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5179 5185
5180 5186 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5181 5187 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5182 5188
5183 5189 @property
5184 5190 def scope(self):
5185 5191 if self.repo:
5186 5192 return repr(self.repo)
5187 5193 if self.repo_group:
5188 5194 if self.child_repos_only:
5189 5195 return repr(self.repo_group) + ' (child repos only)'
5190 5196 else:
5191 5197 return repr(self.repo_group) + ' (recursive)'
5192 5198 if self.child_repos_only:
5193 5199 return 'root_repos'
5194 5200 return 'global'
5195 5201
5196 5202 def __repr__(self):
5197 5203 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5198 5204
5199 5205
5200 5206 class RepoReviewRuleUser(Base, BaseModel):
5201 5207 __tablename__ = 'repo_review_rules_users'
5202 5208 __table_args__ = (
5203 5209 base_table_args
5204 5210 )
5205 5211 ROLE_REVIEWER = 'reviewer'
5206 5212 ROLE_OBSERVER = 'observer'
5207 5213 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5208 5214
5209 5215 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5210 5216 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5211 5217 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5212 5218 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5213 5219 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5214 5220 user = relationship('User', back_populates='user_review_rules')
5215 5221
5216 5222 def rule_data(self):
5217 5223 return {
5218 5224 'mandatory': self.mandatory,
5219 5225 'role': self.role,
5220 5226 }
5221 5227
5222 5228
5223 5229 class RepoReviewRuleUserGroup(Base, BaseModel):
5224 5230 __tablename__ = 'repo_review_rules_users_groups'
5225 5231 __table_args__ = (
5226 5232 base_table_args
5227 5233 )
5228 5234
5229 5235 VOTE_RULE_ALL = -1
5230 5236 ROLE_REVIEWER = 'reviewer'
5231 5237 ROLE_OBSERVER = 'observer'
5232 5238 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5233 5239
5234 5240 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5235 5241 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5236 5242 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5237 5243 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5238 5244 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5239 5245 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5240 5246 users_group = relationship('UserGroup')
5241 5247
5242 5248 def rule_data(self):
5243 5249 return {
5244 5250 'mandatory': self.mandatory,
5245 5251 'role': self.role,
5246 5252 'vote_rule': self.vote_rule
5247 5253 }
5248 5254
5249 5255 @property
5250 5256 def vote_rule_label(self):
5251 5257 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5252 5258 return 'all must vote'
5253 5259 else:
5254 5260 return 'min. vote {}'.format(self.vote_rule)
5255 5261
5256 5262
5257 5263 class RepoReviewRule(Base, BaseModel):
5258 5264 __tablename__ = 'repo_review_rules'
5259 5265 __table_args__ = (
5260 5266 base_table_args
5261 5267 )
5262 5268
5263 5269 repo_review_rule_id = Column(
5264 5270 'repo_review_rule_id', Integer(), primary_key=True)
5265 5271 repo_id = Column(
5266 5272 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5267 5273 repo = relationship('Repository', back_populates='review_rules')
5268 5274
5269 5275 review_rule_name = Column('review_rule_name', String(255))
5270 5276 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5271 5277 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5272 5278 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5273 5279
5274 5280 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5275 5281
5276 5282 # Legacy fields, just for backward compat
5277 5283 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5278 5284 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5279 5285
5280 5286 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5281 5287 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5282 5288
5283 5289 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5284 5290
5285 5291 rule_users = relationship('RepoReviewRuleUser')
5286 5292 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5287 5293
5288 5294 def _validate_pattern(self, value):
5289 5295 re.compile('^' + glob2re(value) + '$')
5290 5296
5291 5297 @hybrid_property
5292 5298 def source_branch_pattern(self):
5293 5299 return self._branch_pattern or '*'
5294 5300
5295 5301 @source_branch_pattern.setter
5296 5302 def source_branch_pattern(self, value):
5297 5303 self._validate_pattern(value)
5298 5304 self._branch_pattern = value or '*'
5299 5305
5300 5306 @hybrid_property
5301 5307 def target_branch_pattern(self):
5302 5308 return self._target_branch_pattern or '*'
5303 5309
5304 5310 @target_branch_pattern.setter
5305 5311 def target_branch_pattern(self, value):
5306 5312 self._validate_pattern(value)
5307 5313 self._target_branch_pattern = value or '*'
5308 5314
5309 5315 @hybrid_property
5310 5316 def file_pattern(self):
5311 5317 return self._file_pattern or '*'
5312 5318
5313 5319 @file_pattern.setter
5314 5320 def file_pattern(self, value):
5315 5321 self._validate_pattern(value)
5316 5322 self._file_pattern = value or '*'
5317 5323
5318 5324 @hybrid_property
5319 5325 def forbid_pr_author_to_review(self):
5320 5326 return self.pr_author == 'forbid_pr_author'
5321 5327
5322 5328 @hybrid_property
5323 5329 def include_pr_author_to_review(self):
5324 5330 return self.pr_author == 'include_pr_author'
5325 5331
5326 5332 @hybrid_property
5327 5333 def forbid_commit_author_to_review(self):
5328 5334 return self.commit_author == 'forbid_commit_author'
5329 5335
5330 5336 @hybrid_property
5331 5337 def include_commit_author_to_review(self):
5332 5338 return self.commit_author == 'include_commit_author'
5333 5339
5334 5340 def matches(self, source_branch, target_branch, files_changed):
5335 5341 """
5336 5342 Check if this review rule matches a branch/files in a pull request
5337 5343
5338 5344 :param source_branch: source branch name for the commit
5339 5345 :param target_branch: target branch name for the commit
5340 5346 :param files_changed: list of file paths changed in the pull request
5341 5347 """
5342 5348
5343 5349 source_branch = source_branch or ''
5344 5350 target_branch = target_branch or ''
5345 5351 files_changed = files_changed or []
5346 5352
5347 5353 branch_matches = True
5348 5354 if source_branch or target_branch:
5349 5355 if self.source_branch_pattern == '*':
5350 5356 source_branch_match = True
5351 5357 else:
5352 5358 if self.source_branch_pattern.startswith('re:'):
5353 5359 source_pattern = self.source_branch_pattern[3:]
5354 5360 else:
5355 5361 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5356 5362 source_branch_regex = re.compile(source_pattern)
5357 5363 source_branch_match = bool(source_branch_regex.search(source_branch))
5358 5364 if self.target_branch_pattern == '*':
5359 5365 target_branch_match = True
5360 5366 else:
5361 5367 if self.target_branch_pattern.startswith('re:'):
5362 5368 target_pattern = self.target_branch_pattern[3:]
5363 5369 else:
5364 5370 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5365 5371 target_branch_regex = re.compile(target_pattern)
5366 5372 target_branch_match = bool(target_branch_regex.search(target_branch))
5367 5373
5368 5374 branch_matches = source_branch_match and target_branch_match
5369 5375
5370 5376 files_matches = True
5371 5377 if self.file_pattern != '*':
5372 5378 files_matches = False
5373 5379 if self.file_pattern.startswith('re:'):
5374 5380 file_pattern = self.file_pattern[3:]
5375 5381 else:
5376 5382 file_pattern = glob2re(self.file_pattern)
5377 5383 file_regex = re.compile(file_pattern)
5378 5384 for file_data in files_changed:
5379 5385 filename = file_data.get('filename')
5380 5386
5381 5387 if file_regex.search(filename):
5382 5388 files_matches = True
5383 5389 break
5384 5390
5385 5391 return branch_matches and files_matches
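# Illustrative sketch (hypothetical rule values, not part of this model): a rule
# configured with source_branch_pattern='feature/*', the default target pattern '*'
# and file_pattern='re:.*\.py$' would evaluate roughly like this:
#
#   rule.matches(source_branch='feature/login',
#                target_branch='default',
#                files_changed=[{'filename': 'setup.py'}])
#   # -> True: the glob matches the source branch, any target branch is accepted,
#   #    and the 're:' prefix makes the file pattern a plain regex that matches
#   #    'setup.py'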
5386 5392
5387 5393 @property
5388 5394 def review_users(self):
5389 5395 """ Returns the users which this rule applies to """
5390 5396
5391 5397 users = collections.OrderedDict()
5392 5398
5393 5399 for rule_user in self.rule_users:
5394 5400 if rule_user.user.active:
5395 5401 if rule_user.user not in users:
5396 5402 users[rule_user.user.username] = {
5397 5403 'user': rule_user.user,
5398 5404 'source': 'user',
5399 5405 'source_data': {},
5400 5406 'data': rule_user.rule_data()
5401 5407 }
5402 5408
5403 5409 for rule_user_group in self.rule_user_groups:
5404 5410 source_data = {
5405 5411 'user_group_id': rule_user_group.users_group.users_group_id,
5406 5412 'name': rule_user_group.users_group.users_group_name,
5407 5413 'members': len(rule_user_group.users_group.members)
5408 5414 }
5409 5415 for member in rule_user_group.users_group.members:
5410 5416 if member.user.active:
5411 5417 key = member.user.username
5412 5418 if key in users:
5413 5419 # skip this member, we already have them;
5414 5420 # this prevents overriding the "first" matched
5415 5421 # user with duplicates from multiple groups
5416 5422 continue
5417 5423
5418 5424 users[key] = {
5419 5425 'user': member.user,
5420 5426 'source': 'user_group',
5421 5427 'source_data': source_data,
5422 5428 'data': rule_user_group.rule_data()
5423 5429 }
5424 5430
5425 5431 return users
5426 5432
5427 5433 def user_group_vote_rule(self, user_id):
5428 5434
5429 5435 rules = []
5430 5436 if not self.rule_user_groups:
5431 5437 return rules
5432 5438
5433 5439 for user_group in self.rule_user_groups:
5434 5440 user_group_members = [x.user_id for x in user_group.users_group.members]
5435 5441 if user_id in user_group_members:
5436 5442 rules.append(user_group)
5437 5443 return rules
5438 5444
5439 5445 def __repr__(self):
5440 5446 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5441 5447
5442 5448
5443 5449 class ScheduleEntry(Base, BaseModel):
5444 5450 __tablename__ = 'schedule_entries'
5445 5451 __table_args__ = (
5446 5452 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5447 5453 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5448 5454 base_table_args,
5449 5455 )
5450 5456 SCHEDULE_TYPE_INTEGER = "integer"
5451 5457 SCHEDULE_TYPE_CRONTAB = "crontab"
5452 5458
5453 5459 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5454 5460 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5455 5461
5456 5462 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5457 5463 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5458 5464 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5459 5465
5460 5466 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5461 5467 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5462 5468
5463 5469 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5464 5470 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5465 5471
5466 5472 # task
5467 5473 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5468 5474 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5469 5475 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5470 5476 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5471 5477
5472 5478 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5473 5479 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5474 5480
5475 5481 @hybrid_property
5476 5482 def schedule_type(self):
5477 5483 return self._schedule_type
5478 5484
5479 5485 @schedule_type.setter
5480 5486 def schedule_type(self, val):
5481 5487 if val not in self.schedule_types:
5482 5488 raise ValueError('Value must be one of `{}`, got `{}`'.format(
5483 5489 self.schedule_types, val))
5484 5490
5485 5491 self._schedule_type = val
5486 5492
5487 5493 @classmethod
5488 5494 def get_uid(cls, obj):
5489 5495 args = obj.task_args
5490 5496 kwargs = obj.task_kwargs
5491 5497 if isinstance(args, JsonRaw):
5492 5498 try:
5493 5499 args = json.loads(args)
5494 5500 except ValueError:
5495 5501 args = tuple()
5496 5502
5497 5503 if isinstance(kwargs, JsonRaw):
5498 5504 try:
5499 5505 kwargs = json.loads(kwargs)
5500 5506 except ValueError:
5501 5507 kwargs = dict()
5502 5508
5503 5509 dot_notation = obj.task_dot_notation
5504 5510 val = '.'.join(map(safe_str, [
5505 5511 sorted(dot_notation), args, sorted(kwargs.items())]))
5506 5512 return sha1(safe_bytes(val))
5507 5513
5508 5514 @classmethod
5509 5515 def get_by_schedule_name(cls, schedule_name):
5510 5516 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5511 5517
5512 5518 @classmethod
5513 5519 def get_by_schedule_id(cls, schedule_id):
5514 5520 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5515 5521
5516 5522 @property
5517 5523 def task(self):
5518 5524 return self.task_dot_notation
5519 5525
5520 5526 @property
5521 5527 def schedule(self):
5522 5528 from rhodecode.lib.celerylib.utils import raw_2_schedule
5523 5529 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5524 5530 return schedule
5525 5531
5526 5532 @property
5527 5533 def args(self):
5528 5534 try:
5529 5535 return list(self.task_args or [])
5530 5536 except ValueError:
5531 5537 return list()
5532 5538
5533 5539 @property
5534 5540 def kwargs(self):
5535 5541 try:
5536 5542 return dict(self.task_kwargs or {})
5537 5543 except ValueError:
5538 5544 return dict()
5539 5545
5540 5546 def _as_raw(self, val, indent=False):
5541 5547 if hasattr(val, 'de_coerce'):
5542 5548 val = val.de_coerce()
5543 5549 if val:
5544 5550 if indent:
5545 5551 val = ext_json.formatted_str_json(val)
5546 5552 else:
5547 5553 val = ext_json.str_json(val)
5548 5554
5549 5555 return val
5550 5556
5551 5557 @property
5552 5558 def schedule_definition_raw(self):
5553 5559 return self._as_raw(self.schedule_definition)
5554 5560
5555 5561 def args_raw(self, indent=False):
5556 5562 return self._as_raw(self.task_args, indent)
5557 5563
5558 5564 def kwargs_raw(self, indent=False):
5559 5565 return self._as_raw(self.task_kwargs, indent)
5560 5566
5561 5567 def __repr__(self):
5562 5568 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5563 5569
5564 5570
5565 5571 @event.listens_for(ScheduleEntry, 'before_update')
5566 5572 def update_task_uid(mapper, connection, target):
5567 5573 target.task_uid = ScheduleEntry.get_uid(target)
5568 5574
5569 5575
5570 5576 @event.listens_for(ScheduleEntry, 'before_insert')
5571 5577 def set_task_uid(mapper, connection, target):
5572 5578 target.task_uid = ScheduleEntry.get_uid(target)
5573 5579
5574 5580
5575 5581 class _BaseBranchPerms(BaseModel):
5576 5582 @classmethod
5577 5583 def compute_hash(cls, value):
5578 5584 return sha1_safe(value)
5579 5585
5580 5586 @hybrid_property
5581 5587 def branch_pattern(self):
5582 5588 return self._branch_pattern or '*'
5583 5589
5584 5590 @hybrid_property
5585 5591 def branch_hash(self):
5586 5592 return self._branch_hash
5587 5593
5588 5594 def _validate_glob(self, value):
5589 5595 re.compile('^' + glob2re(value) + '$')
5590 5596
5591 5597 @branch_pattern.setter
5592 5598 def branch_pattern(self, value):
5593 5599 self._validate_glob(value)
5594 5600 self._branch_pattern = value or '*'
5595 5601 # set the Hash when setting the branch pattern
5596 5602 self._branch_hash = self.compute_hash(self._branch_pattern)
5597 5603
5598 5604 def matches(self, branch):
5599 5605 """
5600 5606 Check if the given branch matches this entry
5601 5607
5602 5608 :param branch: branch name for the commit
5603 5609 """
5604 5610
5605 5611 branch = branch or ''
5606 5612
5607 5613 branch_matches = True
5608 5614 if branch:
5609 5615 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5610 5616 branch_matches = bool(branch_regex.search(branch))
5611 5617
5612 5618 return branch_matches
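# Small illustrative sketch (hypothetical pattern, not part of this model): an entry
# with branch_pattern='release/*' is compiled as '^' + glob2re('release/*') + '$', so:
#
#   entry.matches('release/1.2')  # -> True
#   entry.matches('default')      # -> False
#   entry.matches('')             # -> True, an empty branch name skips the check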
5613 5619
5614 5620
5615 5621 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5616 5622 __tablename__ = 'user_to_repo_branch_permissions'
5617 5623 __table_args__ = (
5618 5624 base_table_args
5619 5625 )
5620 5626
5621 5627 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5622 5628
5623 5629 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5624 5630 repo = relationship('Repository', back_populates='user_branch_perms')
5625 5631
5626 5632 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5627 5633 permission = relationship('Permission')
5628 5634
5629 5635 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5630 5636 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5631 5637
5632 5638 rule_order = Column('rule_order', Integer(), nullable=False)
5633 5639 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5634 5640 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5635 5641
5636 5642 def __repr__(self):
5637 5643 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5638 5644
5639 5645
5640 5646 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5641 5647 __tablename__ = 'user_group_to_repo_branch_permissions'
5642 5648 __table_args__ = (
5643 5649 base_table_args
5644 5650 )
5645 5651
5646 5652 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5647 5653
5648 5654 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5649 5655 repo = relationship('Repository', back_populates='user_group_branch_perms')
5650 5656
5651 5657 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5652 5658 permission = relationship('Permission')
5653 5659
5654 5660 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5655 5661 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5656 5662
5657 5663 rule_order = Column('rule_order', Integer(), nullable=False)
5658 5664 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5659 5665 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5660 5666
5661 5667 def __repr__(self):
5662 5668 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5663 5669
5664 5670
5665 5671 class UserBookmark(Base, BaseModel):
5666 5672 __tablename__ = 'user_bookmarks'
5667 5673 __table_args__ = (
5668 5674 UniqueConstraint('user_id', 'bookmark_repo_id'),
5669 5675 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5670 5676 UniqueConstraint('user_id', 'bookmark_position'),
5671 5677 base_table_args
5672 5678 )
5673 5679
5674 5680 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5675 5681 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5676 5682 position = Column("bookmark_position", Integer(), nullable=False)
5677 5683 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5678 5684 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5679 5685 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5680 5686
5681 5687 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5682 5688 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5683 5689
5684 5690 user = relationship("User")
5685 5691
5686 5692 repository = relationship("Repository")
5687 5693 repository_group = relationship("RepoGroup")
5688 5694
5689 5695 @classmethod
5690 5696 def get_by_position_for_user(cls, position, user_id):
5691 5697 return cls.query() \
5692 5698 .filter(UserBookmark.user_id == user_id) \
5693 5699 .filter(UserBookmark.position == position).scalar()
5694 5700
5695 5701 @classmethod
5696 5702 def get_bookmarks_for_user(cls, user_id, cache=True):
5697 5703 bookmarks = select(
5698 5704 UserBookmark.title,
5699 5705 UserBookmark.position,
5700 5706 ) \
5701 5707 .add_columns(Repository.repo_id, Repository.repo_type, Repository.repo_name) \
5702 5708 .add_columns(RepoGroup.group_id, RepoGroup.group_name) \
5703 5709 .where(UserBookmark.user_id == user_id) \
5704 5710 .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
5705 5711 .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
5706 5712 .order_by(UserBookmark.position.asc())
5707 5713
5708 5714 if cache:
5709 5715 bookmarks = bookmarks.options(
5710 5716 FromCache("sql_cache_short", f"get_user_{user_id}_bookmarks")
5711 5717 )
5712 5718
5713 5719 return Session().execute(bookmarks).all()
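# Hedged usage sketch (the user id is illustrative): each returned row carries the
# bookmark columns plus the outer-joined repository / repo group columns:
#
#   for row in UserBookmark.get_bookmarks_for_user(user_id=2, cache=False):
#       # row.title, row.position, row.repo_name, row.group_name -- the repo or
#       # group columns are None depending on what the bookmark points at
#       ...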
5714 5720
5715 5721 def __repr__(self):
5716 5722 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5717 5723
5718 5724
5719 5725 class FileStore(Base, BaseModel):
5720 5726 __tablename__ = 'file_store'
5721 5727 __table_args__ = (
5722 5728 base_table_args
5723 5729 )
5724 5730
5725 5731 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5726 5732 file_uid = Column('file_uid', String(1024), nullable=False)
5727 5733 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5728 5734 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5729 5735 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5730 5736
5731 5737 # sha256 hash
5732 5738 file_hash = Column('file_hash', String(512), nullable=False)
5733 5739 file_size = Column('file_size', BigInteger(), nullable=False)
5734 5740
5735 5741 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5736 5742 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5737 5743 accessed_count = Column('accessed_count', Integer(), default=0)
5738 5744
5739 5745 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5740 5746
5741 5747 # if repo/repo_group reference is set, check for permissions
5742 5748 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5743 5749
5745 5751 # hidden defines an attachment that should be hidden from the artifact listing
5745 5751 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5746 5752
5747 5753 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5748 5754 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5749 5755
5750 5756 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5751 5757
5752 5758 # scope limited to a user which the requester has access to
5753 5759 scope_user_id = Column(
5754 5760 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5755 5761 nullable=True, unique=None, default=None)
5756 5762 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5757 5763
5758 5764 # scope limited to a user group which the requester has access to
5759 5765 scope_user_group_id = Column(
5760 5766 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5761 5767 nullable=True, unique=None, default=None)
5762 5768 user_group = relationship('UserGroup', lazy='joined')
5763 5769
5764 5770 # scope limited to a repo which the requester has access to
5765 5771 scope_repo_id = Column(
5766 5772 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5767 5773 nullable=True, unique=None, default=None)
5768 5774 repo = relationship('Repository', lazy='joined')
5769 5775
5770 5776 # scope limited to a repo group which the requester has access to
5771 5777 scope_repo_group_id = Column(
5772 5778 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5773 5779 nullable=True, unique=None, default=None)
5774 5780 repo_group = relationship('RepoGroup', lazy='joined')
5775 5781
5776 5782 @classmethod
5777 5783 def get_scope(cls, scope_type, scope_id):
5778 5784 if scope_type == 'repo':
5779 5785 return f'repo:{scope_id}'
5780 5786 elif scope_type == 'repo-group':
5781 5787 return f'repo-group:{scope_id}'
5782 5788 elif scope_type == 'user':
5783 5789 return f'user:{scope_id}'
5784 5790 elif scope_type == 'user-group':
5785 5791 return f'user-group:{scope_id}'
5786 5792 else:
5787 5793 return scope_type
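# Illustrative only: get_scope composes the scope label from its inputs, e.g.
# FileStore.get_scope('repo', 3) returns 'repo:3' and
# FileStore.get_scope('user-group', 7) returns 'user-group:7'.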
5788 5794
5789 5795 @classmethod
5790 5796 def get_by_store_uid(cls, file_store_uid, safe=False):
5791 5797 if safe:
5792 5798 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5793 5799 else:
5794 5800 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5795 5801
5796 5802 @classmethod
5797 5803 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5798 5804 file_description='', enabled=True, hidden=False, check_acl=True,
5799 5805 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5800 5806
5801 5807 store_entry = FileStore()
5802 5808 store_entry.file_uid = file_uid
5803 5809 store_entry.file_display_name = file_display_name
5804 5810 store_entry.file_org_name = filename
5805 5811 store_entry.file_size = file_size
5806 5812 store_entry.file_hash = file_hash
5807 5813 store_entry.file_description = file_description
5808 5814
5809 5815 store_entry.check_acl = check_acl
5810 5816 store_entry.enabled = enabled
5811 5817 store_entry.hidden = hidden
5812 5818
5813 5819 store_entry.user_id = user_id
5814 5820 store_entry.scope_user_id = scope_user_id
5815 5821 store_entry.scope_repo_id = scope_repo_id
5816 5822 store_entry.scope_repo_group_id = scope_repo_group_id
5817 5823
5818 5824 return store_entry
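# Minimal usage sketch (all values hypothetical); create() only builds the object,
# persisting it is left to the caller:
#
#   entry = FileStore.create(
#       file_uid='abcd-1234', filename='build.log', file_hash='<sha256 of file>',
#       file_size=1024, user_id=2, scope_repo_id=3)
#   Session().add(entry)
#   Session().commit()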
5819 5825
5820 5826 @classmethod
5821 5827 def store_metadata(cls, file_store_id, args, commit=True):
5822 5828 file_store = FileStore.get(file_store_id)
5823 5829 if file_store is None:
5824 5830 return
5825 5831
5826 5832 for section, key, value, value_type in args:
5827 5833 has_key = FileStoreMetadata().query() \
5828 5834 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5829 5835 .filter(FileStoreMetadata.file_store_meta_section == section) \
5830 5836 .filter(FileStoreMetadata.file_store_meta_key == key) \
5831 5837 .scalar()
5832 5838 if has_key:
5833 5839 msg = 'key `{}` already defined under section `{}` for this file.'\
5834 5840 .format(key, section)
5835 5841 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5836 5842
5837 5843 # NOTE(marcink): raises ArtifactMetadataBadValueType
5838 5844 FileStoreMetadata.valid_value_type(value_type)
5839 5845
5840 5846 meta_entry = FileStoreMetadata()
5841 5847 meta_entry.file_store = file_store
5842 5848 meta_entry.file_store_meta_section = section
5843 5849 meta_entry.file_store_meta_key = key
5844 5850 meta_entry.file_store_meta_value_type = value_type
5845 5851 meta_entry.file_store_meta_value = value
5846 5852
5847 5853 Session().add(meta_entry)
5848 5854
5849 5855 try:
5850 5856 if commit:
5851 5857 Session().commit()
5852 5858 except IntegrityError:
5853 5859 Session().rollback()
5854 5860 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
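# Hedged sketch of the expected `args` shape (values illustrative): each entry is a
# (section, key, value, value_type) tuple and value_type must be one of the
# SETTINGS_TYPES keys; a duplicate section/key raises ArtifactMetadataDuplicate:
#
#   FileStore.store_metadata(
#       file_store_id=entry.file_store_id,  # a previously created FileStore
#       args=[('tags', 'build', 'nightly', 'str'),
#             ('tags', 'size_mb', '42', 'int')])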
5855 5861
5856 5862 @classmethod
5857 5863 def bump_access_counter(cls, file_uid, commit=True):
5858 5864 FileStore().query()\
5859 5865 .filter(FileStore.file_uid == file_uid)\
5860 5866 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5861 5867 FileStore.accessed_on: datetime.datetime.now()})
5862 5868 if commit:
5863 5869 Session().commit()
5864 5870
5865 5871 def __json__(self):
5866 5872 data = {
5867 5873 'filename': self.file_display_name,
5868 5874 'filename_org': self.file_org_name,
5869 5875 'file_uid': self.file_uid,
5870 5876 'description': self.file_description,
5871 5877 'hidden': self.hidden,
5872 5878 'size': self.file_size,
5873 5879 'created_on': self.created_on,
5874 5880 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5875 5881 'downloaded_times': self.accessed_count,
5876 5882 'sha256': self.file_hash,
5877 5883 'metadata': self.file_metadata,
5878 5884 }
5879 5885
5880 5886 return data
5881 5887
5882 5888 def __repr__(self):
5883 5889 return f'<FileStore({self.file_store_id})>'
5884 5890
5885 5891
5886 5892 class FileStoreMetadata(Base, BaseModel):
5887 5893 __tablename__ = 'file_store_metadata'
5888 5894 __table_args__ = (
5889 5895 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5890 5896 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5891 5897 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5892 5898 base_table_args
5893 5899 )
5894 5900 SETTINGS_TYPES = {
5895 5901 'str': safe_str,
5896 5902 'int': safe_int,
5897 5903 'unicode': safe_str,
5898 5904 'bool': str2bool,
5899 5905 'list': functools.partial(aslist, sep=',')
5900 5906 }
5901 5907
5902 5908 file_store_meta_id = Column(
5903 5909 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5904 5910 primary_key=True)
5905 5911 _file_store_meta_section = Column(
5906 5912 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5907 5913 nullable=True, unique=None, default=None)
5908 5914 _file_store_meta_section_hash = Column(
5909 5915 "file_store_meta_section_hash", String(255),
5910 5916 nullable=True, unique=None, default=None)
5911 5917 _file_store_meta_key = Column(
5912 5918 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5913 5919 nullable=True, unique=None, default=None)
5914 5920 _file_store_meta_key_hash = Column(
5915 5921 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5916 5922 _file_store_meta_value = Column(
5917 5923 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5918 5924 nullable=True, unique=None, default=None)
5919 5925 _file_store_meta_value_type = Column(
5920 5926 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5921 5927 default='unicode')
5922 5928
5923 5929 file_store_id = Column(
5924 5930 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5925 5931 nullable=True, unique=None, default=None)
5926 5932
5927 5933 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5928 5934
5929 5935 @classmethod
5930 5936 def valid_value_type(cls, value):
5931 5937 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5932 5938 raise ArtifactMetadataBadValueType(
5933 5939 'value_type must be one of %s, got %s' % (cls.SETTINGS_TYPES.keys(), value))
5934 5940
5935 5941 @hybrid_property
5936 5942 def file_store_meta_section(self):
5937 5943 return self._file_store_meta_section
5938 5944
5939 5945 @file_store_meta_section.setter
5940 5946 def file_store_meta_section(self, value):
5941 5947 self._file_store_meta_section = value
5942 5948 self._file_store_meta_section_hash = _hash_key(value)
5943 5949
5944 5950 @hybrid_property
5945 5951 def file_store_meta_key(self):
5946 5952 return self._file_store_meta_key
5947 5953
5948 5954 @file_store_meta_key.setter
5949 5955 def file_store_meta_key(self, value):
5950 5956 self._file_store_meta_key = value
5951 5957 self._file_store_meta_key_hash = _hash_key(value)
5952 5958
5953 5959 @hybrid_property
5954 5960 def file_store_meta_value(self):
5955 5961 val = self._file_store_meta_value
5956 5962
5957 5963 if self._file_store_meta_value_type:
5958 5964 # e.g. unicode.encrypted == unicode
5959 5965 _type = self._file_store_meta_value_type.split('.')[0]
5960 5966 # decode the encrypted value if it's an encrypted field type
5961 5967 if '.encrypted' in self._file_store_meta_value_type:
5962 5968 cipher = EncryptedTextValue()
5963 5969 val = safe_str(cipher.process_result_value(val, None))
5964 5970 # do final type conversion
5965 5971 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5966 5972 val = converter(val)
5967 5973
5968 5974 return val
5969 5975
5970 5976 @file_store_meta_value.setter
5971 5977 def file_store_meta_value(self, val):
5972 5978 val = safe_str(val)
5973 5979 # encode the encrypted value
5974 5980 if '.encrypted' in self.file_store_meta_value_type:
5975 5981 cipher = EncryptedTextValue()
5976 5982 val = safe_str(cipher.process_bind_param(val, None))
5977 5983 self._file_store_meta_value = val
5978 5984
5979 5985 @hybrid_property
5980 5986 def file_store_meta_value_type(self):
5981 5987 return self._file_store_meta_value_type
5982 5988
5983 5989 @file_store_meta_value_type.setter
5984 5990 def file_store_meta_value_type(self, val):
5985 5991 # e.g. unicode.encrypted
5986 5992 self.valid_value_type(val)
5987 5993 self._file_store_meta_value_type = val
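# Assignment order matters here (a sketch; store_metadata above assigns in this
# order too): the value setter consults file_store_meta_value_type to decide
# whether to encrypt, so set the type before the value:
#
#   meta = FileStoreMetadata()
#   meta.file_store_meta_value_type = 'str.encrypted'
#   meta.file_store_meta_value = 'secret'  # stored encrypted via EncryptedTextValue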
5988 5994
5989 5995 def __json__(self):
5990 5996 data = {
5991 5997 'artifact': self.file_store.file_uid,
5992 5998 'section': self.file_store_meta_section,
5993 5999 'key': self.file_store_meta_key,
5994 6000 'value': self.file_store_meta_value,
5995 6001 }
5996 6002
5997 6003 return data
5998 6004
5999 6005 def __repr__(self):
6000 6006 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
6001 6007 self.file_store_meta_key, self.file_store_meta_value)
6002 6008
6003 6009
6004 6010 class DbMigrateVersion(Base, BaseModel):
6005 6011 __tablename__ = 'db_migrate_version'
6006 6012 __table_args__ = (
6007 6013 base_table_args,
6008 6014 )
6009 6015
6010 6016 repository_id = Column('repository_id', String(250), primary_key=True)
6011 6017 repository_path = Column('repository_path', Text)
6012 6018 version = Column('version', Integer)
6013 6019
6014 6020 @classmethod
6015 6021 def set_version(cls, version):
6016 6022 """
6017 6023 Helper for forcing a different version, usually for debugging purposes via ishell.
6018 6024 """
6019 6025 ver = DbMigrateVersion.query().first()
6020 6026 ver.version = version
6021 6027 Session().commit()
6022 6028
6023 6029
6024 6030 class DbSession(Base, BaseModel):
6025 6031 __tablename__ = 'db_session'
6026 6032 __table_args__ = (
6027 6033 base_table_args,
6028 6034 )
6029 6035
6030 6036 def __repr__(self):
6031 6037 return f'<DB:DbSession({self.id})>'
6032 6038
6033 6039 id = Column('id', Integer())
6034 6040 namespace = Column('namespace', String(255), primary_key=True)
6035 6041 accessed = Column('accessed', DateTime, nullable=False)
6036 6042 created = Column('created', DateTime, nullable=False)
6037 6043 data = Column('data', PickleType, nullable=False)
@@ -1,419 +1,420 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
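// For reference, a route shows up here once it is registered in the Pyramid app
// with the jsroute flag, e.g. (illustrative Python, not part of this file):
//
//   config.add_route('my_view', '/{repo_name}/my-view', jsroute=True)
//
// and is then typically consumed in JS via pyroutes.url('my_view', {'repo_name': ...}).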
13 13 function registerRCRoutes() {
14 14 // routes registration
15 15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
16 16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
17 17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
18 18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
19 19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
20 20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
21 21 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
22 22 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
23 23 pyroutes.register('admin_automation', '/_admin/automation', []);
24 24 pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']);
25 25 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
26 26 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
27 27 pyroutes.register('admin_home', '/_admin', []);
28 28 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
29 29 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
30 30 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
31 31 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
32 32 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
33 33 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
34 34 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
35 35 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
36 36 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
37 37 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
38 38 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
39 39 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
40 40 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
41 41 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
42 42 pyroutes.register('admin_scheduler', '/_admin/scheduler', []);
43 43 pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []);
44 44 pyroutes.register('admin_settings', '/_admin/settings', []);
45 45 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
46 46 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
47 47 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
48 48 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
49 49 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
50 50 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
51 51 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
52 52 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
53 53 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
54 54 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
55 55 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
56 56 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
57 57 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
58 58 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
59 59 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
60 60 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
61 61 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
62 62 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
63 63 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
64 64 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
65 65 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
66 66 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
67 67 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
68 68 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
69 69 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
70 70 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
71 71 pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []);
72 72 pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']);
73 73 pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']);
74 74 pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']);
75 75 pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []);
76 76 pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']);
77 77 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
78 78 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
79 79 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
80 80 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
81 81 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
82 82 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
83 83 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
84 84 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
85 85 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
86 86 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
87 87 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
88 88 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
89 89 pyroutes.register('apiv2', '/_admin/api', []);
90 90 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
91 91 pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
92 92 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
93 93 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
94 94 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
95 pyroutes.register('branch_remove', '/%(repo_name)s/%(branch_name)s/remove', ['repo_name', 'branch_name']);
95 96 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
96 97 pyroutes.register('channelstream_proxy', '/_channelstream', []);
97 98 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
98 99 pyroutes.register('check_2fa', '/_admin/check_2fa', []);
99 100 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
100 101 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
101 102 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
102 103 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
103 104 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
104 105 pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
105 106 pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
106 107 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
107 108 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
108 109 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
109 110 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
110 111 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
111 112 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
112 113 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
113 114 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
114 115 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
115 116 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
116 117 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
117 118 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
118 119 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
119 120 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
120 121 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
121 122 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
122 123 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
123 124 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
124 125 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
125 126 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
126 127 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
127 128 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
128 129 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
129 130 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
130 131 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
131 132 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
132 133 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
133 134 pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
134 135 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
135 136 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
136 137 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
137 138 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
138 139 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
139 140 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
140 141 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
141 142 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
142 143 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
143 144 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
144 145 pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
145 146 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
146 147 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
147 148 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
148 149 pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
149 150 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
150 151 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
151 152 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
152 153 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
153 154 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
154 155 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
155 156 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
156 157 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
157 158 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
158 159 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
159 160 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
160 161 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
161 162 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
162 163 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
163 164 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
164 165 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
165 166 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
166 167 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
167 168 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
168 169 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
169 170 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
170 171 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
171 172 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
172 173 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
173 174 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
174 175 pyroutes.register('favicon', '/favicon.ico', []);
175 176 pyroutes.register('file_preview', '/_file_preview', []);
176 177 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
177 178 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
178 179 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
179 180 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
180 181 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
181 182 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
182 183 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
183 184 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
184 185 pyroutes.register('gists_create', '/_admin/gists/create', []);
185 186 pyroutes.register('gists_new', '/_admin/gists/new', []);
186 187 pyroutes.register('gists_show', '/_admin/gists', []);
187 188 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
188 189 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
189 190 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
190 191 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
191 192 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
192 193 pyroutes.register('goto_switcher_data', '/_goto_data', []);
193 194 pyroutes.register('home', '/', []);
194 195 pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
195 196 pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
196 197 pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
197 198 pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
198 199 pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
199 200 pyroutes.register('journal', '/_admin/journal', []);
200 201 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
201 202 pyroutes.register('journal_public', '/_admin/public_journal', []);
202 203 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
203 204 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
204 205 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
205 206 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
206 207 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
207 208 pyroutes.register('login', '/_admin/login', []);
208 209 pyroutes.register('logout', '/_admin/logout', []);
209 210 pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
210 211 pyroutes.register('main_page_repos_data', '/_home_repos', []);
211 212 pyroutes.register('markup_preview', '/_markup_preview', []);
212 213 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
213 214 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
214 215 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
215 216 pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
216 217 pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
217 218 pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
218 219 pyroutes.register('my_account_configure_2fa', '/_admin/my_account/configure_2fa', []);
219 220 pyroutes.register('my_account_configure_2fa_update', '/_admin/my_account/configure_2fa_update', []);
220 221 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
221 222 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
222 223 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
223 224 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
224 225 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
225 226 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
226 227 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
227 228 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
228 229 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
229 230 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
230 231 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
231 232 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
232 233 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
233 234 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
234 235 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
235 236 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
236 237 pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []);
237 238 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
238 239 pyroutes.register('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes', []);
239 240 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
240 241 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
241 242 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
242 243 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
243 244 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
244 245 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
245 246 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
246 247 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
247 248 pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
248 249 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
249 250 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
250 251 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
251 252 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
252 253 pyroutes.register('ops_healthcheck', '/_admin/ops/status', []);
253 254 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
254 255 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
255 256 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
256 257 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
257 258 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
258 259 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
259 260 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
260 261 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
261 262 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
262 263 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
263 264 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
264 265 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
265 266 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
266 267 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
267 268 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
268 269 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
269 270 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
270 271 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
271 272 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
272 273 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
273 274 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
274 275 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
275 276 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
276 277 pyroutes.register('register', '/_admin/register', []);
277 278 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
278 279 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
279 280 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
280 281 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
281 282 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
282 283 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
283 284 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
284 285 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
285 286 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
286 287 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
287 288 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
288 289 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
289 290 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
290 291 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
291 292 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
292 293 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
293 294 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
294 295 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
295 296 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
296 297 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
297 298 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
298 299 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
299 300 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
300 301 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
301 302 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
302 303 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
303 304 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
304 305 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
305 306 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
306 307 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
307 308 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
308 309 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
309 310 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
310 311 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
311 312 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
312 313 pyroutes.register('repo_create', '/_admin/repos/create', []);
313 314 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
314 315 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
315 316 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
316 317 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
317 318 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
318 319 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
319 320 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
320 321 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
321 322 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
322 323 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 324 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 325 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
325 326 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
326 327 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
327 328 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
328 329 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
329 330 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
330 331 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
331 332 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
332 333 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
333 334 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
334 335 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
335 336 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
336 337 pyroutes.register('repo_files_replace_binary', '/%(repo_name)s/replace_binary/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
337 338 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
338 339 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
339 340 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
340 341 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
341 342 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
342 343 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
343 344 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
344 345 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
345 346 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
346 347 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
347 348 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
348 349 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
349 350 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
350 351 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
351 352 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
352 353 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
353 354 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
354 355 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
355 356 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
356 357 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
357 358 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
358 359 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
359 360 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
360 361 pyroutes.register('repo_list_data', '/_repos', []);
361 362 pyroutes.register('repo_new', '/_admin/repos/new', []);
362 363 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
363 364 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
364 365 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
365 366 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
366 367 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
367 368 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
368 369 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
369 370 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
370 371 pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']);
371 372 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
372 373 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
373 374 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
374 375 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
375 376 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
376 377 pyroutes.register('repos', '/_admin/repos', []);
377 378 pyroutes.register('repos_data', '/_admin/repos_data', []);
378 379 pyroutes.register('reset_password', '/_admin/password_reset', []);
379 380 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
380 381 pyroutes.register('robots', '/robots.txt', []);
381 382 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
382 383 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
383 384 pyroutes.register('search', '/_admin/search', []);
384 385 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
385 386 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
386 387 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
387 388 pyroutes.register('setup_2fa', '/_admin/setup_2fa', []);
388 389 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
389 390 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
390 391 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
391 392 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
392 393 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
393 394 pyroutes.register('upload_file', '/_file_store/upload', []);
394 395 pyroutes.register('user_autocomplete_data', '/_users', []);
395 396 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
396 397 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
397 398 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
398 399 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
399 400 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
400 401 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
401 402 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
402 403 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
403 404 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
404 405 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
405 406 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
406 407 pyroutes.register('user_groups', '/_admin/user_groups', []);
407 408 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
408 409 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
409 410 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
410 411 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
411 412 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
412 413 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
413 414 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
414 415 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
415 416 pyroutes.register('users', '/_admin/users', []);
416 417 pyroutes.register('users_create', '/_admin/users/create', []);
417 418 pyroutes.register('users_data', '/_admin/users_data', []);
418 419 pyroutes.register('users_new', '/_admin/users/new', []);
419 420 }
@@ -1,118 +1,128 b''
1 1 <%inherit file="/base/base.mako"/>
2 2 <%namespace name="components" file="/summary/components.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('{} Branches').format(c.repo_name)}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()"></%def>
12 12
13 13 <%def name="menu_bar_nav()">
14 14 ${self.menu_items(active='repositories')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.repo_menu(active='summary')}
19 19 </%def>
20 20
21 21 <%def name="main()">
22 22 <div id="repo-summary" class="summary">
23 23 ${components.summary_detail(breadcrumbs_links=self.breadcrumbs_links(), show_downloads=False, simplified=True)}
24 24 </div>
25 25
26 26 <div class="box">
27 27 <div class="title">
28 28
29 29 %if c.has_references:
30 30 <ul class="links">
31 31 <li>
32 32 <input type="submit" id="compare_action" class="btn" disabled="disabled" value="${_('Compare Selected Branches')}"/>
33 33 </li>
34 34 </ul>
35 35 %endif
36 36 %if c.has_references:
37 37 <div class="grid-quick-filter">
38 38 <ul class="grid-filter-box">
39 39 <li class="grid-filter-box-icon">
40 40 <i class="icon-search"></i>
41 41 </li>
42 42 <li class="grid-filter-box-input">
43 43 <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" placeholder="${_('quick filter...')}" value=""/>
44 44 </li>
45 45 </ul>
46 46 </div>
47 47 <div id="obj_count">0</div>
48 48 %endif
49 49 </div>
50 50 <table id="obj_list_table" class="rctable table-bordered"></table>
51 51 </div>
52 52
53 53 <script type="text/javascript">
54 54 $(document).ready(function() {
55 55
56 56 var get_datatable_count = function(){
57 57 var api = $('#obj_list_table').dataTable().api();
58 58 var total = api.page.info().recordsDisplay
59 59 var _text = _ngettext('{0} branch', '{0} branches', total).format(total);
60 60
61 61 $('#obj_count').text(_text);
62 62 };
63 63
64 64 var branches_data = ${c.data|n};
65 // object list
66 $('#obj_list_table').DataTable({
67 data: branches_data,
68 dom: 'rtp',
69 pageLength: ${c.visual.dashboard_items},
70 order: [[ 0, "asc" ]],
71 columns: [
65 var repo_type = "${c.rhodecode_db_repo.repo_type}";
66 var columns = [
72 67 { data: {"_": "name",
73 68 "sort": "name_raw"}, title: "${_('Name')}", className: "td-tags" },
74 69 { data: {"_": "date",
75 70 "sort": "date_raw"}, title: "${_('Date')}", className: "td-time" },
76 71 { data: {"_": "author",
77 72 "sort": "author"}, title: "${_('Author')}", className: "td-user" },
78 73 { data: {"_": "commit",
79 74 "sort": "commit_raw",
80 75 "type": Number}, title: "${_('Commit')}", className: "td-hash" },
81 76 { data: {"_": "compare",
82 77 "sort": "compare"}, title: "${_('Compare')}", className: "td-compare" }
83 ],
78 ];
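// Branch removal through the UI is only wired up for git and hg repositories,
// so the extra 'Action' column is skipped for svn repos.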
79 if (repo_type !== 'svn') {
80 columns.push({
81 data: { "_": "action", "sort": "action" },
82                     title: "${_('Action')}",
83 className: "td-action",
84 orderable: false
85 });
86 }
87
88 $('#obj_list_table').DataTable({
89 data: branches_data,
90 dom: 'rtp',
91 pageLength: ${c.visual.dashboard_items},
92 order: [[ 0, "asc" ]],
93 columns: columns,
84 94 language: {
85 95 paginate: DEFAULT_GRID_PAGINATION,
86 96 emptyTable: _gettext("No branches available yet.")
87 97 },
88 98 "initComplete": function( settings, json ) {
89 99 get_datatable_count();
90 100 timeagoActivate();
91 101 tooltipActivate();
92 102 compare_radio_buttons("${c.repo_name}", 'branch');
93 103 }
94 104 });
95 105
96 106 // update when things change
97 107 $('#obj_list_table').on('draw.dt', function() {
98 108 get_datatable_count();
99 109 timeagoActivate();
100 110 tooltipActivate();
101 111 });
102 112
103 113 // filter, filter both grids
104 114 $('#q_filter').on( 'keyup', function () {
105 115 var obj_api = $('#obj_list_table').dataTable().api();
106 116 obj_api
107 117 .columns(0)
108 118 .search(this.value)
109 119 .draw();
110 120 });
111 121
112 122 // refilter table if page load via back button
113 123 $("#q_filter").trigger('keyup');
114 124
115 125 });
116 126
117 127 </script>
118 128 </%def>
@@ -1,518 +1,542 b''
1 1 ## DATA TABLE RE USABLE ELEMENTS
2 2 ## usage:
3 3 ## <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
4 4 <%namespace name="base" file="/base/base.mako"/>
5 5
6 6 <%def name="metatags_help()">
7 7 <table>
8 8 <%
9 9 example_tags = [
10 10 ('state','[stable]'),
11 11 ('state','[stale]'),
12 12 ('state','[featured]'),
13 13 ('state','[dev]'),
14 14 ('state','[dead]'),
15 15 ('state','[deprecated]'),
16 16
17 17 ('label','[personal]'),
18 18 ('generic','[v2.0.0]'),
19 19
20 20 ('lang','[lang =&gt; JavaScript]'),
21 21 ('license','[license =&gt; LicenseName]'),
22 22
23 23 ('ref','[requires =&gt; RepoName]'),
24 24 ('ref','[recommends =&gt; GroupName]'),
25 25 ('ref','[conflicts =&gt; SomeName]'),
26 26 ('ref','[base =&gt; SomeName]'),
27 27 ('url','[url =&gt; [linkName](https://rhodecode.com)]'),
28 28 ('see','[see =&gt; http://rhodecode.com]'),
29 29 ]
30 30 %>
31 31 % for tag_type, tag in example_tags:
32 32 <tr>
33 33 <td>${tag|n}</td>
34 34 <td>${h.style_metatag(tag_type, tag)|n}</td>
35 35 </tr>
36 36 % endfor
37 37 </table>
38 38 </%def>
39 39
40 40 <%def name="render_description(description, stylify_metatags)">
41 41 <%
42 42 tags = []
43 43 if stylify_metatags:
44 44 tags, description = h.extract_metatags(description)
45 45 %>
46 46 % for tag_type, tag in tags:
47 47 ${h.style_metatag(tag_type, tag)|n,trim}
48 48 % endfor
49 49 <code style="white-space: pre-wrap">${description}</code>
50 50 </%def>
51 51
52 52 ## REPOSITORY RENDERERS
53 53 <%def name="quick_menu(repo_name)">
54 54 <i class="icon-more"></i>
55 55 <div class="menu_items_container hidden">
56 56 <ul class="menu_items">
57 57 <li>
58 58 <a title="${_('Summary')}" href="${h.route_path('repo_summary',repo_name=repo_name)}">
59 59 <span>${_('Summary')}</span>
60 60 </a>
61 61 </li>
62 62 <li>
63 63 <a title="${_('Commits')}" href="${h.route_path('repo_commits',repo_name=repo_name)}">
64 64 <span>${_('Commits')}</span>
65 65 </a>
66 66 </li>
67 67 <li>
68 68 <a title="${_('Files')}" href="${h.route_path('repo_files:default_commit',repo_name=repo_name)}">
69 69 <span>${_('Files')}</span>
70 70 </a>
71 71 </li>
72 72 <li>
73 73 <a title="${_('Fork')}" href="${h.route_path('repo_fork_new',repo_name=repo_name)}">
74 74 <span>${_('Fork')}</span>
75 75 </a>
76 76 </li>
77 77 </ul>
78 78 </div>
79 79 </%def>
80 80
81 81 <%def name="repo_name(name,rtype,rstate,private,archived,fork_repo_name,short_name=False,admin=False)">
82 82 <%
83 83 def get_name(name,short_name=short_name):
84 84 if short_name:
85 85 return name.split('/')[-1]
86 86 else:
87 87 return name
88 88 %>
89 89 <div class="${'repo_state_pending' if rstate == 'repo_state_pending' else ''} truncate">
90 90 ##NAME
91 91 <a href="${h.route_path('edit_repo',repo_name=name) if admin else h.route_path('repo_summary',repo_name=name)}">
92 92
93 93 ##TYPE OF REPO
94 94 %if h.is_hg(rtype):
95 95 <span title="${_('Mercurial repository')}"><i class="icon-hg" style="font-size: 14px;"></i></span>
96 96 %elif h.is_git(rtype):
97 97 <span title="${_('Git repository')}"><i class="icon-git" style="font-size: 14px"></i></span>
98 98 %elif h.is_svn(rtype):
99 99 <span title="${_('Subversion repository')}"><i class="icon-svn" style="font-size: 14px"></i></span>
100 100 %endif
101 101
102 102 ##PRIVATE/PUBLIC
103 103 %if private is True and c.visual.show_private_icon:
104 104 <i class="icon-lock" title="${_('Private repository')}"></i>
105 105 %elif private is False and c.visual.show_public_icon:
106 106 <i class="icon-unlock-alt" title="${_('Public repository')}"></i>
107 107 %else:
108 108 <span></span>
109 109 %endif
110 110 ${get_name(name)}
111 111 </a>
112 112 %if fork_repo_name:
113 113 <a href="${h.route_path('repo_summary',repo_name=fork_repo_name)}"><i class="icon-code-fork"></i></a>
114 114 %endif
115 115 %if rstate == 'repo_state_pending':
116 116 <span class="creation_in_progress tooltip" title="${_('This repository is being created in a background task')}">
117 117 (${_('creating...')})
118 118 </span>
119 119 %endif
120 120
121 121 </div>
122 122 </%def>
123 123
124 124 <%def name="repo_desc(description, stylify_metatags)">
125 125 <%
126 126 tags, description = h.extract_metatags(description)
127 127 %>
128 128
129 129 <div class="truncate-wrap">
130 130 % if stylify_metatags:
131 131 % for tag_type, tag in tags:
132 132 ${h.style_metatag(tag_type, tag)|n}
133 133 % endfor
134 134 % endif
135 135 ${description}
136 136 </div>
137 137
138 138 </%def>
139 139
140 140 <%def name="last_change(last_change)">
141 141 ${h.age_component(last_change, time_is_local=True)}
142 142 </%def>
143 143
144 144 <%def name="revision(repo_name, rev, commit_id, author, last_msg, commit_date)">
145 145 <div>
146 146 %if rev >= 0:
147 147 <code><a class="tooltip-hovercard" data-hovercard-alt=${h.tooltip(last_msg)} data-hovercard-url="${h.route_path('hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)}" href="${h.route_path('repo_commit',repo_name=repo_name,commit_id=commit_id)}">${'r{}:{}'.format(rev,h.short_id(commit_id))}</a></code>
148 148 %else:
149 149 ${_('No commits yet')}
150 150 %endif
151 151 </div>
152 152 </%def>
153 153
154 154 <%def name="rss(name)">
155 155 %if c.rhodecode_user.username != h.DEFAULT_USER:
156 156 <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a>
157 157 %else:
158 158 <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a>
159 159 %endif
160 160 </%def>
161 161
162 162 <%def name="atom(name)">
163 163 %if c.rhodecode_user.username != h.DEFAULT_USER:
164 164 <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a>
165 165 %else:
166 166 <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a>
167 167 %endif
168 168 </%def>
169 169
170 170 <%def name="repo_actions(repo_name, super_user=True)">
171 171 <div>
172 172 <div class="grid_edit">
173 173 <a href="${h.route_path('edit_repo',repo_name=repo_name)}" title="${_('Edit')}">
174 174 Edit
175 175 </a>
176 176 </div>
177 177 <div class="grid_delete">
178 178 ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=repo_name), request=request)}
179 179 <input class="btn btn-link btn-danger" id="remove_${repo_name}" name="remove_${repo_name}"
180 180 onclick="submitConfirm(event, this, _gettext('Confirm to delete this repository'), _gettext('Delete'), '${repo_name}')"
181 181 type="submit" value="Delete"
182 182 >
183 183 ${h.end_form()}
184 184 </div>
185 185 </div>
186 186 </%def>
187 187
188 188 <%def name="repo_state(repo_state)">
189 189 <div>
190 190 %if repo_state == 'repo_state_pending':
191 191 <div class="tag tag4">${_('Creating')}</div>
192 192 %elif repo_state == 'repo_state_created':
193 193 <div class="tag tag1">${_('Created')}</div>
194 194 %else:
195 195 <div class="tag alert2" title="${h.tooltip(repo_state)}">invalid</div>
196 196 %endif
197 197 </div>
198 198 </%def>
199 199
200 200
201 201 ## REPO GROUP RENDERERS
202 202 <%def name="quick_repo_group_menu(repo_group_name)">
203 203 <i class="icon-more"></i>
204 204 <div class="menu_items_container hidden">
205 205 <ul class="menu_items">
206 206 <li>
207 207 <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}">${_('Summary')}</a>
208 208 </li>
209 209
210 210 </ul>
211 211 </div>
212 212 </%def>
213 213
214 214 <%def name="repo_group_name(repo_group_name, children_groups=None)">
215 215 <div>
216 216 <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}">
217 217 <i class="icon-repo-group" title="${_('Repository group')}" style="font-size: 14px"></i>
218 218 %if children_groups:
219 219 ${h.literal(' &raquo; '.join(children_groups))}
220 220 %else:
221 221 ${repo_group_name}
222 222 %endif
223 223 </a>
224 224 </div>
225 225 </%def>
226 226
227 227 <%def name="repo_group_desc(description, personal, stylify_metatags)">
228 228
229 229 <%
230 230 if stylify_metatags:
231 231 tags, description = h.extract_metatags(description)
232 232 %>
233 233
234 234 <div class="truncate-wrap">
235 235 % if personal:
236 236 <div class="metatag" tag="personal">${_('personal')}</div>
237 237 % endif
238 238
239 239 % if stylify_metatags:
240 240 % for tag_type, tag in tags:
241 241 ${h.style_metatag(tag_type, tag)|n}
242 242 % endfor
243 243 % endif
244 244 ${description}
245 245 </div>
246 246
247 247 </%def>
248 248
249 249 <%def name="repo_group_actions(repo_group_id, repo_group_name, gr_count)">
250 250 <div class="grid_edit">
251 251 <a href="${h.route_path('edit_repo_group',repo_group_name=repo_group_name)}" title="${_('Edit')}">Edit</a>
252 252 </div>
253 253 <div class="grid_delete">
254 254 ${h.secure_form(h.route_path('edit_repo_group_advanced_delete', repo_group_name=repo_group_name), request=request)}
255 255 <input class="btn btn-link btn-danger" id="remove_${repo_group_name}" name="remove_${repo_group_name}"
256 256 onclick="submitConfirm(event, this, _gettext('Confirm to delete this repository group'), _gettext('Delete'), '${_ungettext('`{}` with {} repository','`{}` with {} repositories',gr_count).format(repo_group_name, gr_count)}')"
257 257 type="submit" value="Delete"
258 258 >
259 259 ${h.end_form()}
260 260 </div>
261 261 </%def>
262 262
263 263
264 264 <%def name="user_actions(user_id, username)">
265 265 <div class="grid_edit">
266 266 <a href="${h.route_path('user_edit',user_id=user_id)}" title="${_('Edit')}">
267 267 ${_('Edit')}
268 268 </a>
269 269 </div>
270 270 <div class="grid_delete">
271 271 ${h.secure_form(h.route_path('user_delete', user_id=user_id), request=request)}
272 272 <input class="btn btn-link btn-danger" id="remove_user_${user_id}" name="remove_user_${user_id}"
273 273 onclick="submitConfirm(event, this, _gettext('Confirm to delete this user'), _gettext('Delete'), '${username}')"
274 274 type="submit" value="Delete"
275 275 >
276 276 ${h.end_form()}
277 277 </div>
278 278 </%def>
279 279
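## BRANCH ACTION RENDERERS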
280 <%def name="branch_actions_git(branch_name, repo_name, **kwargs)">
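##  Renders a 'Delete' form posting to the 'branch_remove' route; git branches can be deleted outright.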
281 <div class="grid_delete">
282 ${h.secure_form(h.route_path('branch_remove', repo_name=repo_name, branch_name=branch_name), request=request)}
283 <input class="btn btn-link btn-danger" id="remove_branch_${branch_name}" name="remove_branch_${branch_name}"
284 onclick="submitConfirm(event, this, _gettext('Confirm to delete this branch'), _gettext('Delete'), '${branch_name}')"
285 type="submit" value="Delete"
286 >
287 ${h.end_form()}
288 </div>
289 </%def>
290
291 <%def name="branch_actions_hg(branch_name, repo_name, **kwargs)">
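##  Mercurial named branches cannot be deleted, only closed; the 'Close' form is rendered only for branches that are not already closed.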
292 <div class="grid_delete">
293 %if not kwargs['closed']:
294 ${h.secure_form(h.route_path('branch_remove', repo_name=repo_name, branch_name=branch_name), request=request)}
295 <input class="btn btn-link btn-danger" id="remove_branch_${branch_name}" name="remove_branch_${branch_name}"
296 onclick="submitConfirm(event, this, _gettext('Confirm to close this branch'), _gettext('Close'), '${branch_name}')"
297 type="submit" value="Close"
298 >
299 ${h.end_form()}
300 %endif
301 </div>
302 </%def>
303
280 304 <%def name="user_group_actions(user_group_id, user_group_name)">
281 305 <div class="grid_edit">
282 306 <a href="${h.route_path('edit_user_group', user_group_id=user_group_id)}" title="${_('Edit')}">Edit</a>
283 307 </div>
284 308 <div class="grid_delete">
285 309 ${h.secure_form(h.route_path('user_groups_delete', user_group_id=user_group_id), request=request)}
286 310 <input class="btn btn-link btn-danger" id="remove_group_${user_group_id}" name="remove_group_${user_group_id}"
287 311 onclick="submitConfirm(event, this, _gettext('Confirm to delete this user group'), _gettext('Delete'), '${user_group_name}')"
288 312 type="submit" value="Delete"
289 313 >
290 314 ${h.end_form()}
291 315 </div>
292 316 </%def>
293 317
294 318
295 319 <%def name="user_name(user_id, username)">
296 320 ${h.link_to(h.person(username, 'username_or_name_or_email'), h.route_path('user_edit', user_id=user_id))}
297 321 </%def>
298 322
299 323 <%def name="user_profile(username)">
300 324 ${base.gravatar_with_user(username, 16, tooltip=True)}
301 325 </%def>
302 326
303 327 <%def name="user_group_name(user_group_name)">
304 328 <div>
305 329 <i class="icon-user-group" title="${_('User group')}"></i>
306 330 ${h.link_to_group(user_group_name)}
307 331 </div>
308 332 </%def>
309 333
310 334
311 335 ## GISTS
312 336
313 337 <%def name="gist_gravatar(full_contact)">
314 338 <div class="gist_gravatar">
315 339 ${base.gravatar(full_contact, 30)}
316 340 </div>
317 341 </%def>
318 342
319 343 <%def name="gist_access_id(gist_access_id, full_contact)">
320 344 <div>
321 345 <code>
322 346 <a href="${h.route_path('gist_show', gist_id=gist_access_id)}">${gist_access_id}</a>
323 347 </code>
324 348 </div>
325 349 </%def>
326 350
327 351 <%def name="gist_author(full_contact, created_on, expires)">
328 352 ${base.gravatar_with_user(full_contact, 16, tooltip=True)}
329 353 </%def>
330 354
331 355
332 356 <%def name="gist_created(created_on)">
333 357 <div class="created">
334 358 ${h.age_component(created_on, time_is_local=True)}
335 359 </div>
336 360 </%def>
337 361
338 362 <%def name="gist_expires(expires)">
339 363 <div class="created">
340 364 %if expires == -1:
341 365 ${_('never')}
342 366 %else:
343 367 ${h.age_component(h.time_to_utcdatetime(expires))}
344 368 %endif
345 369 </div>
346 370 </%def>
347 371
348 372 <%def name="gist_type(gist_type)">
349 373 %if gist_type == 'public':
350 374 <span class="tag tag-gist-public disabled">${_('Public Gist')}</span>
351 375 %else:
352 376 <span class="tag tag-gist-private disabled">${_('Private Gist')}</span>
353 377 %endif
354 378 </%def>
355 379
356 380 <%def name="gist_description(gist_description)">
357 381 ${gist_description}
358 382 </%def>
359 383
360 384
361 385 ## PULL REQUESTS GRID RENDERERS
362 386
363 387 <%def name="pullrequest_target_repo(repo_name)">
364 388 <div class="truncate">
365 389 ${h.link_to(repo_name,h.route_path('repo_summary',repo_name=repo_name))}
366 390 </div>
367 391 </%def>
368 392
369 393 <%def name="pullrequest_status(status)">
370 394 <i class="icon-circle review-status-${status}"></i>
371 395 </%def>
372 396
373 397 <%def name="pullrequest_title(title, description)">
374 398 ${title}
375 399 </%def>
376 400
377 401 <%def name="pullrequest_commit_flow(pull_request)">
378 402 <div class="pr-commit-flow">
379 403 <%!
380 404 def pr_ref_type_to_icon(ref_type):
381 405 return dict(
382 406 branch='branch',
383 407 book='bookmark',
384 408 rev='history',
385 409 ).get(ref_type, 'branch')
386 410
387 411 %>
388 412 ## Source
389 413 <code class="pr-source-info"><i class="icon-${pr_ref_type_to_icon(pull_request.source_ref_parts.type)}"></i>${pull_request.source_ref_parts.name}</code>
390 414 &rarr;
391 415 ## Target
392 416 <code class="pr-target-info"><i class="icon-${pr_ref_type_to_icon(pull_request.target_ref_parts.type)}"></i>${pull_request.target_ref_parts.name}</code>
393 417 </div>
394 418 </%def>
395 419
396 420 <%def name="pullrequest_comments(comments_nr)">
397 421 <i class="icon-comment"></i> ${comments_nr}
398 422 </%def>
399 423
400 424 <%def name="pullrequest_name(pull_request_id, state, is_wip, target_repo_name, short=False)">
401 425 <code>
402 426 <a href="${h.route_path('pullrequest_show',repo_name=target_repo_name,pull_request_id=pull_request_id)}">
403 427 % if short:
404 428 !${pull_request_id}
405 429 % else:
406 430 ${_('Pull request !{}').format(pull_request_id)}
407 431 % endif
408 432 </a>
409 433 </code>
410 434 % if state not in ['created']:
411 435 <span class="tag tag-merge-state-${state} tooltip" title="Pull request state is changing">${state}</span>
412 436 % endif
413 437
414 438 % if is_wip:
415 439 <span class="tag tooltip" title="${_('Work in progress')}">wip</span>
416 440 % endif
417 441 </%def>
418 442
419 443 <%def name="pullrequest_updated_on(updated_on, pr_version=None)">
420 444 % if pr_version:
421 445 <code>v${pr_version}</code>
422 446 % endif
423 447 ${h.age_component(h.time_to_utcdatetime(updated_on))}
424 448 </%def>
425 449
426 450 <%def name="pullrequest_author(full_contact)">
427 451 ${base.gravatar_with_user(full_contact, 16, tooltip=True)}
428 452 </%def>
429 453
430 454
431 455 ## ARTIFACT RENDERERS
432 456 <%def name="repo_artifact_name(repo_name, file_uid, artifact_display_name)">
433 457 <a href="${h.route_path('repo_artifacts_get', repo_name=repo_name, uid=file_uid)}">
434 458 ${artifact_display_name or '_EMPTY_NAME_'}
435 459 </a>
436 460 </%def>
437 461
438 462 <%def name="repo_artifact_admin_name(file_uid, artifact_display_name)">
439 463 <a href="${h.route_path('admin_artifacts_show_info', uid=file_uid)}">
440 464 ${(artifact_display_name or '_EMPTY_NAME_')}
441 465 </a>
442 466 </%def>
443 467
444 468 <%def name="repo_artifact_uid(repo_name, file_uid)">
445 469 <code>${h.shorter(file_uid, size=24, prefix=True)}</code>
446 470 </%def>
447 471
448 472 <%def name="repo_artifact_sha256(artifact_sha256)">
449 473 <div class="code">${h.shorter(artifact_sha256, 12)}</div>
450 474 </%def>
451 475
452 476 <%def name="repo_artifact_actions(repo_name, file_store_id, file_uid)">
453 477 ## <div class="grid_edit">
454 478 ## <a href="#Edit" title="${_('Edit')}">${_('Edit')}</a>
455 479 ## </div>
456 480 <div class="grid_edit">
457 481 <a href="${h.route_path('repo_artifacts_info', repo_name=repo_name, uid=file_store_id)}" title="${_('Info')}">${_('Info')}</a>
458 482 </div>
459 483 % if h.HasRepoPermissionAny('repository.admin')(c.repo_name):
460 484 <div class="grid_delete">
461 485 ${h.secure_form(h.route_path('repo_artifacts_delete', repo_name=repo_name, uid=file_store_id), request=request)}
462 486 <input class="btn btn-link btn-danger" id="remove_artifact_${file_store_id}" name="remove_artifact_${file_store_id}"
463 487 onclick="submitConfirm(event, this, _gettext('Confirm to delete this artifact'), _gettext('Delete'), '${file_uid}')"
464 488 type="submit" value="${_('Delete')}"
465 489 >
466 490 ${h.end_form()}
467 491 </div>
468 492 % endif
469 493 </%def>
470 494
471 495
472 496 <%def name="markup_form(form_id, form_text='', help_text=None)">
473 497
474 498 <div class="markup-form">
475 499 <div class="markup-form-area">
476 500 <div class="markup-form-area-header">
477 501 <ul class="nav-links clearfix">
478 502 <li class="active">
479 503 <a href="#edit-text" tabindex="-1" id="edit-btn_${form_id}">${_('Write')}</a>
480 504 </li>
481 505 <li class="">
482 506 <a href="#preview-text" tabindex="-1" id="preview-btn_${form_id}">${_('Preview')}</a>
483 507 </li>
484 508 </ul>
485 509 </div>
486 510
487 511 <div class="markup-form-area-write" style="display: block;">
488 512 <div id="edit-container_${form_id}" style="margin-top: -1px">
489 513 <textarea id="${form_id}" name="${form_id}" class="comment-block-ta ac-input">${form_text if form_text else ''}</textarea>
490 514 </div>
491 515 <div id="preview-container_${form_id}" class="clearfix" style="display: none;">
492 516 <div id="preview-box_${form_id}" class="preview-box"></div>
493 517 </div>
494 518 </div>
495 519
496 520 <div class="markup-form-area-footer">
497 521 <div class="toolbar">
498 522 <div class="toolbar-text">
499 523 ${(_('Parsed using %s syntax') % (
500 524 ('<a href="%s">%s</a>' % (h.route_url('%s_help' % c.visual.default_renderer), c.visual.default_renderer.upper())),
501 525 )
502 526 )|n}
503 527 </div>
504 528 </div>
505 529 </div>
506 530 </div>
507 531
508 532 <div class="markup-form-footer">
509 533 % if help_text:
510 534 <span class="help-block">${help_text}</span>
511 535 % endif
512 536 </div>
513 537 </div>
514 538 <script type="text/javascript">
515 539 new MarkupForm('${form_id}');
516 540 </script>
517 541
518 542 </%def>
@@ -1,322 +1,323 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 def get_url_defs():
21 21 from rhodecode.apps._base import ADMIN_PREFIX
22 22
23 23 return {
24 24 "home": "/",
25 25 "main_page_repos_data": "/_home_repos",
26 26 "main_page_repo_groups_data": "/_home_repo_groups",
27 27 "repo_group_home": "/{repo_group_name}",
28 28 "user_autocomplete_data": "/_users",
29 29 "user_group_autocomplete_data": "/_user_groups",
30 30 "repo_list_data": "/_repos",
31 31 "goto_switcher_data": "/_goto_data",
32 32 "admin_home": ADMIN_PREFIX + "",
33 33 "admin_audit_logs": ADMIN_PREFIX + "/audit_logs",
34 34 "admin_defaults_repositories": ADMIN_PREFIX + "/defaults/repositories",
35 35 "admin_defaults_repositories_update": ADMIN_PREFIX
36 36 + "/defaults/repositories/update",
37 37 "search": ADMIN_PREFIX + "/search",
38 38 "search_repo": "/{repo_name}/search",
39 39 "my_account_auth_tokens": ADMIN_PREFIX + "/my_account/auth_tokens",
40 40 "my_account_auth_tokens_add": ADMIN_PREFIX + "/my_account/auth_tokens/new",
41 41 "my_account_auth_tokens_delete": ADMIN_PREFIX
42 42 + "/my_account/auth_tokens/delete",
43 43 "repos": ADMIN_PREFIX + "/repos",
44 44 "repos_data": ADMIN_PREFIX + "/repos_data",
45 45 "repo_groups": ADMIN_PREFIX + "/repo_groups",
46 46 "repo_groups_data": ADMIN_PREFIX + "/repo_groups_data",
47 47 "user_groups": ADMIN_PREFIX + "/user_groups",
48 48 "user_groups_data": ADMIN_PREFIX + "/user_groups_data",
49 49 "user_profile": "/_profiles/{username}",
50 50 "profile_user_group": "/_profile_user_group/{user_group_name}",
51 51 "repo_summary": "/{repo_name}",
52 52 "repo_creating_check": "/{repo_name}/repo_creating_check",
53 53 "edit_repo": "/{repo_name}/settings",
54 54 "edit_repo_vcs": "/{repo_name}/settings/vcs",
55 55 "edit_repo_vcs_update": "/{repo_name}/settings/vcs/update",
56 56 "edit_repo_vcs_svn_pattern_delete": "/{repo_name}/settings/vcs/svn_pattern/delete",
57 57 "repo_archivefile": "/{repo_name}/archive/{fname}",
58 58 "repo_files_diff": "/{repo_name}/diff/{f_path}",
59 59 "repo_files_diff_2way_redirect": "/{repo_name}/diff-2way/{f_path}",
60 60 "repo_files": "/{repo_name}/files/{commit_id}/{f_path}",
61 61 "repo_files:default_path": "/{repo_name}/files/{commit_id}/",
62 62 "repo_files:default_commit": "/{repo_name}/files",
63 63 "repo_files:rendered": "/{repo_name}/render/{commit_id}/{f_path}",
64 64 "repo_files:annotated": "/{repo_name}/annotate/{commit_id}/{f_path}",
65 65 "repo_files:annotated_previous": "/{repo_name}/annotate-previous/{commit_id}/{f_path}",
66 66 "repo_files_nodelist": "/{repo_name}/nodelist/{commit_id}/{f_path}",
67 67 "repo_file_raw": "/{repo_name}/raw/{commit_id}/{f_path}",
68 68 "repo_file_download": "/{repo_name}/download/{commit_id}/{f_path}",
69 69 "repo_file_history": "/{repo_name}/history/{commit_id}/{f_path}",
70 70 "repo_file_authors": "/{repo_name}/authors/{commit_id}/{f_path}",
71 71 "repo_files_remove_file": "/{repo_name}/remove_file/{commit_id}/{f_path}",
72 72 "repo_files_delete_file": "/{repo_name}/delete_file/{commit_id}/{f_path}",
73 73 "repo_files_edit_file": "/{repo_name}/edit_file/{commit_id}/{f_path}",
74 74 "repo_files_update_file": "/{repo_name}/update_file/{commit_id}/{f_path}",
75 75 "repo_files_add_file": "/{repo_name}/add_file/{commit_id}/{f_path}",
76 76 "repo_files_upload_file": "/{repo_name}/upload_file/{commit_id}/{f_path}",
77 77 "repo_files_create_file": "/{repo_name}/create_file/{commit_id}/{f_path}",
78 78 "repo_files_replace_binary": "/{repo_name}/replace_binary/{commit_id}/{f_path}",
79 79 "repo_nodetree_full": "/{repo_name}/nodetree_full/{commit_id}/{f_path}",
80 80 "repo_nodetree_full:default_path": "/{repo_name}/nodetree_full/{commit_id}/",
81 81 "journal": ADMIN_PREFIX + "/journal",
82 82 "journal_rss": ADMIN_PREFIX + "/journal/rss",
83 83 "journal_atom": ADMIN_PREFIX + "/journal/atom",
84 84 "journal_public": ADMIN_PREFIX + "/public_journal",
85 85 "journal_public_atom": ADMIN_PREFIX + "/public_journal/atom",
86 86 "journal_public_atom_old": ADMIN_PREFIX + "/public_journal_atom",
87 87 "journal_public_rss": ADMIN_PREFIX + "/public_journal/rss",
88 88 "journal_public_rss_old": ADMIN_PREFIX + "/public_journal_rss",
89 89 "toggle_following": ADMIN_PREFIX + "/toggle_following",
90 90 "upload_file": "/_file_store/upload",
91 91 "download_file": "/_file_store/download/{fid}",
92 92 "download_file_by_token": "/_file_store/token-download/{_auth_token}/{fid}",
93 93 "gists_show": ADMIN_PREFIX + "/gists",
94 94 "gists_new": ADMIN_PREFIX + "/gists/new",
95 95 "gists_create": ADMIN_PREFIX + "/gists/create",
96 96 "gist_show": ADMIN_PREFIX + "/gists/{gist_id}",
97 97 "gist_delete": ADMIN_PREFIX + "/gists/{gist_id}/delete",
98 98 "gist_edit": ADMIN_PREFIX + "/gists/{gist_id}/edit",
99 99 "gist_edit_check_revision": ADMIN_PREFIX
100 100 + "/gists/{gist_id}/edit/check_revision",
101 101 "gist_update": ADMIN_PREFIX + "/gists/{gist_id}/update",
102 102 "gist_show_rev": ADMIN_PREFIX + "/gists/{gist_id}/rev/{revision}",
103 103 "gist_show_formatted": ADMIN_PREFIX
104 104 + "/gists/{gist_id}/rev/{revision}/{format}",
105 105 "gist_show_formatted_path": ADMIN_PREFIX
106 106 + "/gists/{gist_id}/rev/{revision}/{format}/{f_path}",
107 107 "login": ADMIN_PREFIX + "/login",
108 108 "logout": ADMIN_PREFIX + "/logout",
109 109 "setup_2fa": ADMIN_PREFIX + "/setup_2fa",
110 110 "check_2fa": ADMIN_PREFIX + "/check_2fa",
111 111 "register": ADMIN_PREFIX + "/register",
112 112 "reset_password": ADMIN_PREFIX + "/password_reset",
113 113 "reset_password_confirmation": ADMIN_PREFIX + "/password_reset_confirmation",
114 114 "admin_permissions_application": ADMIN_PREFIX + "/permissions/application",
115 115 "admin_permissions_application_update": ADMIN_PREFIX
116 116 + "/permissions/application/update",
117 117 "repo_commit_raw": "/{repo_name}/changeset-diff/{commit_id}",
118 118 "user_group_members_data": ADMIN_PREFIX
119 119 + "/user_groups/{user_group_id}/members",
120 120 "user_groups_new": ADMIN_PREFIX + "/user_groups/new",
121 121 "user_groups_create": ADMIN_PREFIX + "/user_groups/create",
122 122 "edit_user_group": ADMIN_PREFIX + "/user_groups/{user_group_id}/edit",
123 123 "edit_user_group_advanced_sync": ADMIN_PREFIX
124 124 + "/user_groups/{user_group_id}/edit/advanced/sync",
125 125 "edit_user_group_global_perms_update": ADMIN_PREFIX
126 126 + "/user_groups/{user_group_id}/edit/global_permissions/update",
127 127 "user_groups_update": ADMIN_PREFIX + "/user_groups/{user_group_id}/update",
128 128 "user_groups_delete": ADMIN_PREFIX + "/user_groups/{user_group_id}/delete",
129 129 "edit_user_group_perms": ADMIN_PREFIX
130 130 + "/user_groups/{user_group_id}/edit/permissions",
131 131 "edit_user_group_perms_update": ADMIN_PREFIX
132 132 + "/user_groups/{user_group_id}/edit/permissions/update",
133 133 "edit_repo_group": "/{repo_group_name}/_edit",
134 134 "edit_repo_group_perms": "/{repo_group_name:}/_settings/permissions",
135 135 "edit_repo_group_perms_update": "/{repo_group_name}/_settings/permissions/update",
136 136 "edit_repo_group_advanced": "/{repo_group_name}/_settings/advanced",
137 137 "edit_repo_group_advanced_delete": "/{repo_group_name}/_settings/advanced/delete",
138 138 "edit_user_ssh_keys": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys",
139 139 "edit_user_ssh_keys_generate_keypair": ADMIN_PREFIX
140 140 + "/users/{user_id}/edit/ssh_keys/generate",
141 141 "edit_user_ssh_keys_add": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys/new",
142 142 "edit_user_ssh_keys_delete": ADMIN_PREFIX
143 143 + "/users/{user_id}/edit/ssh_keys/delete",
144 144 "users": ADMIN_PREFIX + "/users",
145 145 "users_data": ADMIN_PREFIX + "/users_data",
146 146 "users_create": ADMIN_PREFIX + "/users/create",
147 147 "users_new": ADMIN_PREFIX + "/users/new",
148 148 "user_edit": ADMIN_PREFIX + "/users/{user_id}/edit",
149 149 "user_edit_advanced": ADMIN_PREFIX + "/users/{user_id}/edit/advanced",
150 150 "user_edit_global_perms": ADMIN_PREFIX
151 151 + "/users/{user_id}/edit/global_permissions",
152 152 "user_edit_global_perms_update": ADMIN_PREFIX
153 153 + "/users/{user_id}/edit/global_permissions/update",
154 154 "user_update": ADMIN_PREFIX + "/users/{user_id}/update",
155 155 "user_delete": ADMIN_PREFIX + "/users/{user_id}/delete",
156 156 "user_create_personal_repo_group": ADMIN_PREFIX
157 157 + "/users/{user_id}/create_repo_group",
158 158 "edit_user_auth_tokens": ADMIN_PREFIX + "/users/{user_id}/edit/auth_tokens",
159 159 "edit_user_auth_tokens_add": ADMIN_PREFIX
160 160 + "/users/{user_id}/edit/auth_tokens/new",
161 161 "edit_user_auth_tokens_delete": ADMIN_PREFIX
162 162 + "/users/{user_id}/edit/auth_tokens/delete",
163 163 "edit_user_emails": ADMIN_PREFIX + "/users/{user_id}/edit/emails",
164 164 "edit_user_emails_add": ADMIN_PREFIX + "/users/{user_id}/edit/emails/new",
165 165 "edit_user_emails_delete": ADMIN_PREFIX + "/users/{user_id}/edit/emails/delete",
166 166 "edit_user_ips": ADMIN_PREFIX + "/users/{user_id}/edit/ips",
167 167 "edit_user_ips_add": ADMIN_PREFIX + "/users/{user_id}/edit/ips/new",
168 168 "edit_user_ips_delete": ADMIN_PREFIX + "/users/{user_id}/edit/ips/delete",
169 169 "edit_user_perms_summary": ADMIN_PREFIX
170 170 + "/users/{user_id}/edit/permissions_summary",
171 171 "edit_user_perms_summary_json": ADMIN_PREFIX
172 172 + "/users/{user_id}/edit/permissions_summary/json",
173 173 "edit_user_audit_logs": ADMIN_PREFIX + "/users/{user_id}/edit/audit",
174 174 "edit_user_audit_logs_download": ADMIN_PREFIX
175 175 + "/users/{user_id}/edit/audit/download",
176 176 "admin_settings": ADMIN_PREFIX + "/settings",
177 177 "admin_settings_update": ADMIN_PREFIX + "/settings/update",
178 178 "admin_settings_global": ADMIN_PREFIX + "/settings/global",
179 179 "admin_settings_global_update": ADMIN_PREFIX + "/settings/global/update",
180 180 "admin_settings_vcs": ADMIN_PREFIX + "/settings/vcs",
181 181 "admin_settings_vcs_update": ADMIN_PREFIX + "/settings/vcs/update",
182 182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
183 183 + "/settings/vcs/svn_pattern_delete",
184 184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
185 185 "admin_settings_mapping_update": ADMIN_PREFIX + "/settings/mapping/update",
186 186 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 187 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 188 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",
189 189 "admin_settings_issuetracker_update": ADMIN_PREFIX
190 190 + "/settings/issue-tracker/update",
191 191 "admin_settings_issuetracker_test": ADMIN_PREFIX
192 192 + "/settings/issue-tracker/test",
193 193 "admin_settings_issuetracker_delete": ADMIN_PREFIX
194 194 + "/settings/issue-tracker/delete",
195 195 "admin_settings_email": ADMIN_PREFIX + "/settings/email",
196 196 "admin_settings_email_update": ADMIN_PREFIX + "/settings/email/update",
197 197 "admin_settings_hooks": ADMIN_PREFIX + "/settings/hooks",
198 198 "admin_settings_hooks_update": ADMIN_PREFIX + "/settings/hooks/update",
199 199 "admin_settings_hooks_delete": ADMIN_PREFIX + "/settings/hooks/delete",
200 200 "admin_settings_search": ADMIN_PREFIX + "/settings/search",
201 201 "admin_settings_labs": ADMIN_PREFIX + "/settings/labs",
202 202 "admin_settings_labs_update": ADMIN_PREFIX + "/settings/labs/update",
203 203 "admin_settings_sessions": ADMIN_PREFIX + "/settings/sessions",
204 204 "admin_settings_sessions_cleanup": ADMIN_PREFIX + "/settings/sessions/cleanup",
205 205 "admin_settings_system": ADMIN_PREFIX + "/settings/system",
206 206 "admin_settings_system_update": ADMIN_PREFIX + "/settings/system/updates",
207 207 "admin_settings_open_source": ADMIN_PREFIX + "/settings/open_source",
208 208 "repo_group_new": ADMIN_PREFIX + "/repo_group/new",
209 209 "repo_group_create": ADMIN_PREFIX + "/repo_group/create",
210 210 "repo_new": ADMIN_PREFIX + "/repos/new",
211 211 "repo_create": ADMIN_PREFIX + "/repos/create",
212 212 "admin_permissions_global": ADMIN_PREFIX + "/permissions/global",
213 213 "admin_permissions_global_update": ADMIN_PREFIX + "/permissions/global/update",
214 214 "admin_permissions_object": ADMIN_PREFIX + "/permissions/object",
215 215 "admin_permissions_object_update": ADMIN_PREFIX + "/permissions/object/update",
216 216 "admin_permissions_ips": ADMIN_PREFIX + "/permissions/ips",
217 217 "admin_permissions_overview": ADMIN_PREFIX + "/permissions/overview",
218 218 "admin_permissions_ssh_keys": ADMIN_PREFIX + "/permissions/ssh_keys",
219 219 "admin_permissions_ssh_keys_data": ADMIN_PREFIX + "/permissions/ssh_keys/data",
220 220 "admin_permissions_ssh_keys_update": ADMIN_PREFIX
221 221 + "/permissions/ssh_keys/update",
222 222 "pullrequest_show": "/{repo_name}/pull-request/{pull_request_id}",
223 223 "pull_requests_global": ADMIN_PREFIX + "/pull-request/{pull_request_id}",
224 224 "pull_requests_global_0": ADMIN_PREFIX + "/pull_requests/{pull_request_id}",
225 225 "pull_requests_global_1": ADMIN_PREFIX + "/pull-requests/{pull_request_id}",
226 226 "notifications_show_all": ADMIN_PREFIX + "/notifications",
227 227 "notifications_mark_all_read": ADMIN_PREFIX + "/notifications_mark_all_read",
228 228 "notifications_show": ADMIN_PREFIX + "/notifications/{notification_id}",
229 229 "notifications_update": ADMIN_PREFIX
230 230 + "/notifications/{notification_id}/update",
231 231 "notifications_delete": ADMIN_PREFIX
232 232 + "/notifications/{notification_id}/delete",
233 233 "my_account": ADMIN_PREFIX + "/my_account/profile",
234 234 "my_account_edit": ADMIN_PREFIX + "/my_account/edit",
235 235 "my_account_update": ADMIN_PREFIX + "/my_account/update",
236 236 "my_account_pullrequests": ADMIN_PREFIX + "/my_account/pull_requests",
237 237 "my_account_pullrequests_data": ADMIN_PREFIX + "/my_account/pull_requests/data",
238 238 "my_account_emails": ADMIN_PREFIX + "/my_account/emails",
239 239 "my_account_emails_add": ADMIN_PREFIX + "/my_account/emails/new",
240 240 "my_account_emails_delete": ADMIN_PREFIX + "/my_account/emails/delete",
241 241 "my_account_password": ADMIN_PREFIX + "/my_account/password",
242 242 "my_account_password_update": ADMIN_PREFIX + "/my_account/password/update",
243 243 "my_account_repos": ADMIN_PREFIX + "/my_account/repos",
244 244 "my_account_watched": ADMIN_PREFIX + "/my_account/watched",
245 245 "my_account_perms": ADMIN_PREFIX + "/my_account/perms",
246 246 "my_account_notifications": ADMIN_PREFIX + "/my_account/notifications",
247 247 "my_account_ssh_keys": ADMIN_PREFIX + "/my_account/ssh_keys",
248 248 "my_account_ssh_keys_generate": ADMIN_PREFIX + "/my_account/ssh_keys/generate",
249 249 "my_account_ssh_keys_add": ADMIN_PREFIX + "/my_account/ssh_keys/new",
250 250 "my_account_ssh_keys_delete": ADMIN_PREFIX + "/my_account/ssh_keys/delete",
251 251 "pullrequest_show_all": "/{repo_name}/pull-request",
252 252 "pullrequest_show_all_data": "/{repo_name}/pull-request-data",
253 253 "bookmarks_home": "/{repo_name}/bookmarks",
254 254 "branches_home": "/{repo_name}/branches",
255 "branch_remove": "/{repo_name}/{branch_name}/remove",
255 256 "tags_home": "/{repo_name}/tags",
256 257 "repo_changelog": "/{repo_name}/changelog",
257 258 "repo_commits": "/{repo_name}/commits",
258 259 "repo_commits_file": "/{repo_name}/commits/{commit_id}/{f_path}",
259 260 "repo_commits_elements": "/{repo_name}/commits_elements",
260 261 "repo_commit": "/{repo_name}/changeset/{commit_id}",
261 262 "repo_commit_comment_create": "/{repo_name}/changeset/{commit_id}/comment/create",
262 263 "repo_commit_comment_preview": "/{repo_name}/changeset/{commit_id}/comment/preview",
263 264 "repo_commit_comment_delete": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete",
264 265 "repo_commit_comment_edit": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit",
265 266 "repo_commit_children": "/{repo_name}/changeset_children/{commit_id}",
266 267 "repo_commit_parents": "/{repo_name}/changeset_parents/{commit_id}",
267 268 "repo_commit_patch": "/{repo_name}/changeset-patch/{commit_id}",
268 269 "repo_commit_download": "/{repo_name}/changeset-download/{commit_id}",
269 270 "repo_commit_data": "/{repo_name}/changeset-data/{commit_id}",
270 271 "repo_compare": "/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}",
271 272 "repo_compare_select": "/{repo_name}/compare",
272 273 "rss_feed_home": "/{repo_name}/feed-rss",
273 274 "atom_feed_home": "/{repo_name}/feed-atom",
274 275 "rss_feed_home_old": "/{repo_name}/feed/rss",
275 276 "atom_feed_home_old": "/{repo_name}/feed/atom",
276 277 "repo_fork_new": "/{repo_name}/fork",
277 278 "repo_fork_create": "/{repo_name}/fork/create",
278 279 "repo_forks_show_all": "/{repo_name}/forks",
279 280 "repo_forks_data": "/{repo_name}/forks/data",
280 281 "edit_repo_issuetracker": "/{repo_name}/settings/issue_trackers",
281 282 "edit_repo_issuetracker_test": "/{repo_name}/settings/issue_trackers/test",
282 283 "edit_repo_issuetracker_delete": "/{repo_name}/settings/issue_trackers/delete",
283 284 "edit_repo_issuetracker_update": "/{repo_name}/settings/issue_trackers/update",
284 285 "edit_repo_maintenance": "/{repo_name}/settings/maintenance",
285 286 "edit_repo_maintenance_execute": "/{repo_name}/settings/maintenance/execute",
286 287 "repo_changelog_file": "/{repo_name}/changelog/{commit_id}/{f_path}",
287 288 "pullrequest_repo_refs": "/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}",
288 289 "pullrequest_repo_targets": "/{repo_name}/pull-request/repo-destinations",
289 290 "pullrequest_new": "/{repo_name}/pull-request/new",
290 291 "pullrequest_create": "/{repo_name}/pull-request/create",
291 292 "pullrequest_update": "/{repo_name}/pull-request/{pull_request_id}/update",
292 293 "pullrequest_merge": "/{repo_name}/pull-request/{pull_request_id}/merge",
293 294 "pullrequest_delete": "/{repo_name}/pull-request/{pull_request_id}/delete",
294 295 "pullrequest_comment_create": "/{repo_name}/pull-request/{pull_request_id}/comment",
295 296 "pullrequest_comment_delete": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete",
296 297 "pullrequest_comment_edit": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit",
297 298 "edit_repo_caches": "/{repo_name}/settings/caches",
298 299 "edit_repo_perms": "/{repo_name}/settings/permissions",
299 300 "edit_repo_fields": "/{repo_name}/settings/fields",
300 301 "edit_repo_remote": "/{repo_name}/settings/remote",
301 302 "edit_repo_statistics": "/{repo_name}/settings/statistics",
302 303 "edit_repo_advanced": "/{repo_name}/settings/advanced",
303 304 "edit_repo_advanced_delete": "/{repo_name}/settings/advanced/delete",
304 305 "edit_repo_advanced_archive": "/{repo_name}/settings/advanced/archive",
305 306 "edit_repo_advanced_fork": "/{repo_name}/settings/advanced/fork",
306 307 "edit_repo_advanced_locking": "/{repo_name}/settings/advanced/locking",
307 308 "edit_repo_advanced_journal": "/{repo_name}/settings/advanced/journal",
308 309 "repo_stats": "/{repo_name}/repo_stats/{commit_id}",
309 310 "repo_refs_data": "/{repo_name}/refs-data",
310 311 "repo_refs_changelog_data": "/{repo_name}/refs-data-changelog",
311 312 "repo_artifacts_stream_store": "/_file_store/stream-upload",
312 313 }
313 314
314 315
315 316 def route_path(name, params=None, **kwargs):
316 317 import urllib.parse
317 318
318 319 base_url = get_url_defs()[name].format(**kwargs)
319 320
320 321 if params:
321 322 base_url = f"{base_url}?{urllib.parse.urlencode(params)}"
322 323 return base_url
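
A minimal usage sketch (not part of the file above) of how the newly added "branch_remove" entry could be resolved through route_path(); the repository name, branch name, and token value below are hypothetical placeholders, not values from this change:

    # Resolve the new "branch_remove" pattern; all values are illustrative only.
    url = route_path("branch_remove", repo_name="some-repo", branch_name="old-branch")
    # -> "/some-repo/old-branch/remove"

    # Extra query parameters are URL-encoded via the params argument.
    url_with_token = route_path(
        "branch_remove",
        params={"csrf_token": "deadbeef"},  # hypothetical token value
        repo_name="some-repo",
        branch_name="old-branch",
    )
    # -> "/some-repo/old-branch/remove?csrf_token=deadbeef"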