# Copyright (C) 2016-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import logging
import operator

from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest

from rhodecode.lib import helpers as h, diffs, rc_cache
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils import repo_name_slug
from rhodecode.lib.utils2 import (
    StrictAttributeDict,
    str2bool,
    safe_int,
    datetime_to_time,
)
from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
from rhodecode.model import repo
from rhodecode.model import repo_group
from rhodecode.model import user_group
from rhodecode.model import user
from rhodecode.model.db import User
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel
from rhodecode.model.repo import ReadmeFinder

log = logging.getLogger(__name__)

ADMIN_PREFIX: str = "/_admin"
STATIC_FILE_PREFIX: str = "/_static"

URL_NAME_REQUIREMENTS = {
    # group names can have a slash in them, but they must not end with a slash
    "group_name": r".*?[^/]",
    "repo_group_name": r".*?[^/]",
    # repo names can have a slash in them, but they must not end with a slash
    "repo_name": r".*?[^/]",
    # file path eats up everything at the end
    "f_path": r".*",
    # reference types
    "source_ref_type": r"(branch|book|tag|rev|\%\(source_ref_type\)s)",
    "target_ref_type": r"(branch|book|tag|rev|\%\(target_ref_type\)s)",
}
r1505 | ||||
r5192 | def add_route_with_slash(config, name, pattern, **kw): | |||
r1774 | config.add_route(name, pattern, **kw) | |||
r5192 | if not pattern.endswith("/"): | |||
config.add_route(name + "_slash", pattern + "/", **kw) | ||||
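

# Illustrative usage sketch (not part of the original module); the route name
# and pattern below are hypothetical examples:
#
#   add_route_with_slash(config, "repo_summary_explicit", "/{repo_name}/summary")
#   # -> registers "repo_summary_explicit" for "/{repo_name}/summary" and
#   #    "repo_summary_explicit_slash" for "/{repo_name}/summary/"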


def add_route_requirements(route_path, requirements=None):
    """
    Adds regex requirements to pyramid routes using a mapping dict
    e.g::

        add_route_requirements('{repo_name}/settings')
    """
    requirements = requirements or URL_NAME_REQUIREMENTS
    for key, regex in list(requirements.items()):
        route_path = route_path.replace("{%s}" % key, "{%s:%s}" % (key, regex))
    return route_path
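

# Illustrative sketch (not part of the original module): with the default
# URL_NAME_REQUIREMENTS mapping above, placeholders gain inline regex
# constraints, e.g.:
#
#   add_route_requirements("/{repo_name}/settings")
#   # -> "/{repo_name:.*?[^/]}/settings"
#   add_route_requirements("/{repo_name}/files/{commit_id}/{f_path}")
#   # -> "/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}"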


def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function"""
    if h.is_svn(repo):
        return _format_ref_id_svn
    else:
        return _format_ref_id


def _format_ref_id(name, raw_id):
    """Default formatting of a given reference `name`"""
    return name


def _format_ref_id_svn(name, raw_id):
    """Special way of formatting a reference for Subversion including path"""
    return f"{name}@{raw_id}"
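

# Illustrative sketch (not part of the original module): how the two formatters
# differ for a hypothetical reference name and commit id:
#
#   _format_ref_id("branches/stable", "2150")      # -> "branches/stable"
#   _format_ref_id_svn("branches/stable", "2150")  # -> "branches/stable@2150"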


class TemplateArgs(StrictAttributeDict):
    pass


class BaseAppView(object):
    DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
    EXTRA_VIEWS_TO_IGNORE = ["login", "register", "logout"]
    SETUP_2FA_VIEW = "setup_2fa"
    VERIFY_2FA_VIEW = "check_2fa"

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session

        if not hasattr(request, "user"):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably outside of the web-app context, e.g. API CALL/VCS CALL
            if hasattr(request, "vcs_call") or hasattr(request, "rpc_method"):
                log.warning("Unable to process request `%s` in this scope", request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_configuration(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_check(
            request.matched_route.name, self._rhodecode_db_user
        )

    def _maybe_needs_password_change(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS:
            return

        log.debug(
            "Checking if user %s needs password change on view %s", user_obj, view_name
        )

        skip_user_views = [
            "logout",
            "login",
            "check_2fa",
            "my_account_password",
            "my_account_password_update",
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        should_change = self.user_data.get("force_password_change")
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug("User %s requires password change", user_obj)
            h.flash(
                "You are required to change your password",
                "warning",
                ignore_duplicate=True,
            )

            if view_name not in skip_user_views:
                raise HTTPFound(self.request.route_path("my_account_password"))

    def _maybe_needs_2fa_configuration(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
            h.flash(
                "You are required to configure 2FA",
                "warning",
                ignore_duplicate=False,
            )
            # Special case for users created "on the fly" (ldap case for new user)
            user_obj.check_2fa_required = False
            raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))

    def _maybe_needs_2fa_check(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        if user_obj.check_2fa_required and view_name != self.VERIFY_2FA_VIEW:
            raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))

    def _log_creation_exception(self, e, repo_name):
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == "INVALID_CERTIFICATE":
            log.exception("Exception creating a repository: invalid certificate")
            msg = _("Error creating repository %s: invalid certificate") % repo_name
        else:
            log.exception("Exception creating a repository")
            msg = _("Error creating repository %s") % repo_name
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes

            attach_context_attributes(c, self.request, self.request.user.user_id)

        c.is_super_admin = c.auth_user.is_admin

        c.can_create_repo = c.is_super_admin
        c.can_create_repo_group = c.is_super_admin
        c.can_create_user_group = c.is_super_admin

        c.is_delegated_admin = False

        if not c.auth_user.is_default and not c.is_super_admin:
            c.can_create_repo = h.HasPermissionAny("hg.create.repository")(
                user=self.request.user
            )
            repositories = c.auth_user.repositories_admin or c.can_create_repo

            c.can_create_repo_group = h.HasPermissionAny("hg.repogroup.create.true")(
                user=self.request.user
            )
            repository_groups = (
                c.auth_user.repository_groups_admin or c.can_create_repo_group
            )

            c.can_create_user_group = h.HasPermissionAny("hg.usergroup.create.true")(
                user=self.request.user
            )
            user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
            # delegated admin can create, or manage some objects
            c.is_delegated_admin = repositories or repository_groups or user_groups
        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        local_tmpl_args = {"defaults": {}, "errors": {}, "c": tmpl_args}
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError("Needs implementation in view class")


class RepoAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
        self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
        self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)

    def _handle_missing_requirements(self, error):
        log.error(
            "Requirements are missing for repository %s: %s",
            self.db_repo_name,
            safe_str(error),
        )

    def _prepare_and_set_clone_url(self, c):
        username = ""
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = self._rhodecode_user.username

        _def_clone_uri = c.clone_uri_tmpl
        _def_clone_uri_id = c.clone_uri_id_tmpl
        _def_clone_uri_ssh = c.clone_uri_ssh_tmpl

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri
        )
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_id
        )
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=_def_clone_uri_ssh, ssh=True
        )

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        c.repository_artifacts = self.db_repo_artifacts
        c.repository_is_user_following = ScmModel().is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id
        )
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            # NOTE(marcink):
            # comparison to None since if it's an object __bool__ is expensive to
            # calculate
            if self.rhodecode_vcs_repo is not None:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username
                )
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {"error": str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            log.debug(
                "Repository was not found on filesystem, check if it exists or is not damaged"
            )
            h.flash(
                _(
                    "The repository `%(repo_name)s` cannot be loaded from the filesystem. "
                    "Please check if it exists, or is not damaged."
                )
                % {"repo_name": c.repo_name},
                category="error",
                ignore_duplicate=True,
            )
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(("edit_repo", "repo_summary")):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path("repo_summary", repo_name=self.db_repo_name)
                )

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path("home"))

        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                "repository.write", "repository.read", "repository.admin"
            )(self.db_repo.fork.repo_name, "summary fork link")

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects; everything else should call _get_f_path
        """
        f_path = matchdict.get("f_path")
        if f_path:
            # fix for multiple initial slashes that cause errors for GIT
            return f_path.lstrip("/")

        return default

    def _get_f_path(self, matchdict, default=None):
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _get_repo_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_repo_settings_inherited()
        return settings.get(settings_key, default)

    def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/", nodes=None):
        log.debug("Looking for README file at path %s", path)
        if commit_id:
            landing_commit_id = commit_id
        else:
            landing_commit = db_repo.get_landing_commit()
            if isinstance(landing_commit, EmptyCommit):
                return None, None
            landing_commit_id = landing_commit.raw_id

        cache_namespace_uid = f"repo.{db_repo.repo_id}"
        region = rc_cache.get_or_create_region(
            "cache_repo", cache_namespace_uid, use_async_runner=False
        )
        start = time.time()

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(
            _repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
        ):
            _readme_data = None
            _readme_filename = None

            commit = db_repo.get_commit(_commit_id)
            log.debug("Searching for a README file at commit %s.", _commit_id)
            readme_node = ReadmeFinder(_renderer_type).search(
                commit, path=_readme_search_path, nodes=nodes
            )

            if readme_node:
                log.debug("Found README node: %s", readme_node)
                relative_urls = {
                    "raw": h.route_path(
                        "repo_file_raw",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                    "standard": h.route_path(
                        "repo_files",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                }

                _readme_data = self._render_readme_or_none(
                    commit, readme_node, relative_urls
                )
                _readme_filename = readme_node.str_path

            return _readme_data, _readme_filename

        readme_data, readme_filename = generate_repo_readme(
            db_repo.repo_id,
            landing_commit_id,
            db_repo.repo_name,
            path,
            renderer_type,
        )

        compute_time = time.time() - start
        log.debug(
            "Repo README for path %s generated and computed in %.4fs",
            path,
            compute_time,
        )
        return readme_data, readme_filename

    def _render_readme_or_none(self, commit, readme_node, relative_urls):
        log.debug("Found README file `%s` rendering...", readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.str_content, filename=readme_node.path
            )
            if relative_urls:
                return relative_links(html_source, relative_urls)
            return html_source
        except Exception:
            log.exception("Exception while trying to render the README")

    def get_recache_flag(self):
        for flag_name in ["force_recache", "force-recache", "no-cache"]:
            flag_val = self.request.GET.get(flag_name)
            if str2bool(flag_val):
                return True
        return False
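
    # Illustrative sketch (not part of the original module): any of the flags
    # checked above can be passed as a truthy GET parameter to bypass cached
    # data for a view; e.g. a hypothetical request to
    # "/some-repo/summary?force_recache=1" makes get_recache_flag() return True.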

    def get_commit_preload_attrs(cls):
        pre_load = [
            "author",
            "branch",
            "date",
            "message",
            "parents",
            "obsolete",
            "phase",
            "hidden",
        ]
        return pre_load


class PathFilter(object):
    # Expects an instance of BasePathPermissionChecker or None
    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        if self.path_access_allowed(path):
            return path
        raise HTTPForbidden()

    def path_access_allowed(self, path):
        log.debug("Checking ACL permissions for PathFilter for `%s`", path)
        if self.permission_checker:
            has_access = path and self.permission_checker.has_access(path)
            log.debug(
                "ACL Permissions checker enabled, ACL Check has_access: %s", has_access
            )
            return has_access

        log.debug("ACL permissions checker not enabled, skipping...")
        return True

    def filter_patchset(self, patchset):
        if not self.permission_checker or not patchset:
            return patchset, False
        had_filtered = False
        filtered_patchset = []
        for patch in patchset:
            filename = patch.get("filename", None)
            if not filename or self.permission_checker.has_access(filename):
                filtered_patchset.append(patch)
            else:
                had_filtered = True
        if had_filtered:
            if isinstance(patchset, diffs.LimitedDiffContainer):
                filtered_patchset = diffs.LimitedDiffContainer(
                    patchset.diff_limit, patchset.cur_diff_size, filtered_patchset
                )
            return filtered_patchset, True
        else:
            return patchset, False
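
    # Illustrative sketch (not part of the original module): given a checker
    # that denies access to "secrets.txt", a hypothetical patchset such as
    #
    #   [{"filename": "README.rst", ...}, {"filename": "secrets.txt", ...}]
    #
    # is filtered down to the README entry only and the method returns
    # (filtered_patchset, True), signalling that hidden changes were dropped.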

    def render_patchset_filtered(
        self, diffset, patchset, source_ref=None, target_ref=None
    ):
        filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
        result = diffset.render_patchset(
            filtered_patchset, source_ref=source_ref, target_ref=target_ref
        )
        result.has_hidden_changes = has_hidden_changes
        return result

    def get_raw_patch(self, diff_processor):
        if self.permission_checker is None:
            return diff_processor.as_raw()
        elif self.permission_checker.has_full_access:
            return diff_processor.as_raw()
        else:
            return "# Repository has user-specific filters, raw patch generation is disabled."

    @property
    def is_enabled(self):
        return self.permission_checker is not None


class RepoGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
        c.repo_group = self.db_repo_group
        return c

    def _revoke_perms_on_yourself(self, form_result):
        _updates = [
            u
            for u in form_result["perm_updates"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        _additions = [
            u
            for u in form_result["perm_additions"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        _deletions = [
            u
            for u in form_result["perm_deletions"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        admin_perm = "group.admin"
        if (
            _updates
            and _updates[0][1] != admin_perm
            or _additions
            and _additions[0][1] != admin_perm
            or _deletions
            and _deletions[0][1] != admin_perm
        ):
            return True
        return False
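
    # Illustrative sketch (not part of the original module): each entry in
    # form_result["perm_updates"] / ["perm_additions"] / ["perm_deletions"] is
    # expected to start with a user id and a permission name, e.g. a
    # hypothetical ("5", "group.admin", "user"); the check returns True when
    # the current user's own entry names a permission other than "group.admin".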


class UserGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user_group = request.db_user_group
        self.db_user_group_name = self.db_user_group.users_group_name


class UserAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        if not request.db_user_supports_default:
            if self.db_user.username == User.DEFAULT_USER:
                h.flash(
                    _("Editing user `{}` is disabled.".format(User.DEFAULT_USER)),
                    category="warning",
                )
                raise HTTPFound(h.route_path("users"))


class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        column_map = column_map or {}
        column_index = safe_int(request.GET.get("order[0][column]"))
        order_dir = request.GET.get("order[0][dir]", "desc")
        order_by = request.GET.get("columns[%s][data][sort]" % column_index, "name_raw")

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get("search[value]")
        return search_q, order_by, order_dir
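
    # Illustrative sketch (not part of the original module): for a hypothetical
    # DataTables request such as
    #   ?order[0][column]=1&order[0][dir]=asc&columns[1][data][sort]=username&search[value]=joe
    # this returns ("joe", "username", "asc"), with column_map optionally
    # translating "username" into a DB column name.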

    def _extract_chunk(self, request):
        start = safe_int(request.GET.get("start"), 0)
        length = safe_int(request.GET.get("length"), 25)
        draw = safe_int(request.GET.get("draw"))
        return draw, start, length

    def _get_order_col(self, order_by, model):
        if isinstance(order_by, str):
            try:
                return operator.attrgetter(order_by)(model)
            except AttributeError:
                return None
        else:
            return order_by
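
    # Illustrative sketch (not part of the original module): with a hypothetical
    # SQLAlchemy model, _get_order_col("user.username", UserModel) resolves the
    # dotted attribute path via operator.attrgetter, while passing an already
    # resolved column object returns it unchanged.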


class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        return c

    def load_refs_context(self, ref_items, partials_template):
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message", "parents"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load
            )
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            use_commit_id = "/" in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    "repo_files",
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else "",
                    commit_id=commit_id,
                    _query=dict(at=ref_name),
                )
            else:
                files_url = h.route_path(
                    "repo_files",
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else "",
                    commit_id=ref_name,
                    _query=dict(at=ref_name),
                )

            data.append(
                {
                    "name": _render("name", ref_name, files_url, closed),
                    "name_raw": ref_name,
                    "closed": closed,
                    "date": _render("date", commit.date),
                    "date_raw": datetime_to_time(commit.date),
                    "author": _render("author", commit.author),
                    "commit": _render(
                        "commit", commit.message, commit.raw_id, commit.idx
                    ),
                    "commit_raw": commit.idx,
                    "compare": _render(
                        "compare", format_ref_id(ref_name, commit.raw_id)
                    ),
                }
            )

        return data


class RepoRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        repo_name = info["match"]["repo_name"]

        repo_name_parts = repo_name.split("/")
        repo_slugs = [repo_name_slug(x) for x in repo_name_parts]

        if repo_name_parts != repo_slugs:
            # short-skip if the repo-name doesn't follow slug rule
            log.warning(
                "repo_name: %s is different than slug %s", repo_name_parts, repo_slugs
            )
            return False

        repo_model = repo.RepoModel()

        by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)

        def redirect_if_creating(route_info, db_repo):
            skip_views = ["edit_repo_advanced_delete"]
            route = route_info["route"]
            # we should skip delete view so we can actually "remove" repositories
            # if they get stuck in creating state.
            if route.name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                repo_creating_url = request.route_path(
                    "repo_creating", repo_name=db_repo.repo_name
                )
                raise HTTPFound(repo_creating_url)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo = by_name_match
            request.db_repo_name = request.db_repo.repo_name
            redirect_if_creating(info, by_name_match)
            return True

        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            request.db_repo = by_id_match
            request.db_repo_name = request.db_repo.repo_name
            redirect_if_creating(info, by_id_match)
            return True

        return False


class RepoForbidArchivedRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_forbid_archived = {self.val}"

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        rhodecode_db_repo = request.db_repo
        log.debug(
            "%s checking archived flag for repo %s",
            self.__class__.__name__,
            rhodecode_db_repo.repo_name,
        )

        if rhodecode_db_repo.archived:
            log.warning(
                "Current view is not supported for archived repo: %s",
                rhodecode_db_repo.repo_name,
            )

            h.flash(
                h.literal(_("Action not supported for archived repository.")),
                category="warning",
            )
            summary_url = request.route_path(
                "repo_summary", repo_name=rhodecode_db_repo.repo_name
            )
            raise HTTPFound(summary_url)
        return True


class RepoTypeRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val or ["hg", "git", "svn"]

    def text(self):
        return f"repo_accepted_type = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        rhodecode_db_repo = request.db_repo
        log.debug(
            "%s checking repo type for %s in %s",
            self.__class__.__name__,
            rhodecode_db_repo.repo_type,
            self.val,
        )

        if rhodecode_db_repo.repo_type in self.val:
            return True
        else:
            log.warning(
                "Current view is not supported for repo type: %s",
                rhodecode_db_repo.repo_type,
            )
            return False


class RepoGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        repo_group_name = info["match"]["repo_group_name"]

        repo_group_name_parts = repo_group_name.split("/")
        repo_group_slugs = [repo_name_slug(x) for x in repo_group_name_parts]

        if repo_group_name_parts != repo_group_slugs:
            # short-skip if the repo-group name doesn't follow slug rule
            log.warning(
                "repo_group_name: %s is different than slug %s",
                repo_group_name_parts,
                repo_group_slugs,
            )
            return False

        repo_group_model = repo_group.RepoGroupModel()
        by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo_group = by_name_match
            request.db_repo_group_name = request.db_repo_group.group_name
            return True

        return False


class UserGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"user_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        user_group_id = info["match"]["user_group_id"]
        user_group_model = user_group.UserGroup()
        by_id_match = user_group_model.get(user_group_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user_group = by_id_match
            return True

        return False


class UserRoutePredicateBase(object):
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        user_id = info["match"]["user_id"]
        user_model = user.User()
        by_id_match = user_model.get(user_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user = by_id_match
            request.db_user_supports_default = self.supports_default
            return True

        return False


class UserRoutePredicate(UserRoutePredicateBase):
    supports_default = False

    def text(self):
        return f"user_route = {self.val}"

    phash = text


class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    supports_default = True

    def text(self):
        return f"user_with_default_route = {self.val}"

    phash = text


def includeme(config):
    config.add_route_predicate("repo_route", RepoRoutePredicate)
    config.add_route_predicate("repo_accepted_types", RepoTypeRoutePredicate)
    config.add_route_predicate(
        "repo_forbid_when_archived", RepoForbidArchivedRoutePredicate
    )
    config.add_route_predicate("repo_group_route", RepoGroupRoutePredicate)
    config.add_route_predicate("user_group_route", UserGroupRoutePredicate)
    config.add_route_predicate("user_route_with_default", UserRouteWithDefaultPredicate)
    config.add_route_predicate("user_route", UserRoutePredicate)
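

# Illustrative sketch (not part of the original module): once includeme() has
# registered the predicates above, route declarations can opt into them by
# keyword; the route below is a hypothetical example:
#
#   config.add_route(
#       name="repo_summary",
#       pattern="/{repo_name:.*?[^/]}/summary",
#       repo_route=True,
#       repo_accepted_types=["hg", "git", "svn"],
#   )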