__init__.py
858 lines
| 30.4 KiB
| text/x-python
|
PythonLexer
r5088 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
r1502 | # | |||
# This program is free software: you can redistribute it and/or modify | ||||
# it under the terms of the GNU Affero General Public License, version 3 | ||||
# (only), as published by the Free Software Foundation. | ||||
# | ||||
# This program is distributed in the hope that it will be useful, | ||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
# | ||||
# You should have received a copy of the GNU Affero General Public License | ||||
# along with this program. If not, see <http://www.gnu.org/licenses/>. | ||||
# | ||||
# This program is dual-licensed. If you wish to learn more about the | ||||
# RhodeCode Enterprise Edition, including its added features, Support services, | ||||
# and proprietary license terms, please see https://rhodecode.com/licenses/ | ||||
r1539 | import time | |||
r1502 | import logging | |||
r2040 | import operator | |||
r1895 | ||||
r3036 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest | |||
r1502 | ||||
r3924 | from rhodecode.lib import helpers as h, diffs, rc_cache | |||
r5086 | from rhodecode.lib.str_utils import safe_str | |||
r4841 | from rhodecode.lib.utils import repo_name_slug | |||
r3104 | from rhodecode.lib.utils2 import ( | |||
r5086 | StrictAttributeDict, str2bool, safe_int, datetime_to_time) | |||
r3924 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links | |||
from rhodecode.lib.vcs.backends.base import EmptyCommit | ||||
r1714 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError | |||
r1554 | from rhodecode.model import repo | |||
r1774 | from rhodecode.model import repo_group | |||
r2068 | from rhodecode.model import user_group | |||
r2114 | from rhodecode.model import user | |||
r1539 | from rhodecode.model.db import User | |||
r1554 | from rhodecode.model.scm import ScmModel | |||
r4201 | from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel | |||
r3924 | from rhodecode.model.repo import ReadmeFinder | |||
r1502 | ||||
log = logging.getLogger(__name__)

# URL prefixes for the admin panel and for served static assets.
ADMIN_PREFIX = '/_admin'
STATIC_FILE_PREFIX = '/_static'

# Default placeholder -> regex mapping consumed by add_route_requirements()
# to turn `{name}` route placeholders into `{name:regex}` pyramid patterns.
URL_NAME_REQUIREMENTS = {
    # group name can have a slash in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    'repo_group_name': r'.*?[^/]',
    # repo names can have a slash in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types; the escaped %()s forms allow literal template tokens
    'source_ref_type': r'(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': r'(branch|book|tag|rev|\%\(target_ref_type\)s)',
}
r1505 | ||||
def add_route_with_slash(config, name, pattern, **kw):
    """
    Register `pattern` under `name` and, unless the pattern already ends
    with a slash, also register a ``<name>_slash`` twin route for the
    trailing-slash variant of the same URL.
    """
    config.add_route(name, pattern, **kw)
    if not pattern.endswith('/'):
        config.add_route(f'{name}_slash', f'{pattern}/', **kw)
def add_route_requirements(route_path, requirements=None):
    """
    Adds regex requirements to pyramid routes using a mapping dict
    e.g::

        add_route_requirements('{repo_name}/settings')

    :param route_path: route pattern containing ``{name}`` placeholders.
    :param requirements: optional mapping of placeholder name -> regex;
        falls back to the module-level URL_NAME_REQUIREMENTS.
    :returns: the pattern with placeholders expanded to ``{name:regex}``.
    """
    requirements = requirements or URL_NAME_REQUIREMENTS
    # Iterate the mapping directly: the previous defensive list() copy was
    # needless since the dict is never mutated inside the loop.
    for key, regex in requirements.items():
        route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
    return route_path
def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function."""
    # Subversion references carry their path/revision; everything else
    # is formatted by plain name.
    return _format_ref_id_svn if h.is_svn(repo) else _format_ref_id
def _format_ref_id(name, raw_id): | ||||
"""Default formatting of a given reference `name`""" | ||||
return name | ||||
def _format_ref_id_svn(name, raw_id): | ||||
"""Special way of formatting a reference for Subversion including path""" | ||||
r5095 | return f'{name}@{raw_id}' | |||
r1746 | ||||
class TemplateArgs(StrictAttributeDict):
    """
    Container for template context variables (exposed to templates as ``c``).
    Inherits all behavior from StrictAttributeDict; nothing is added here.
    """
    pass
class BaseAppView(object):
    """
    Base class for pyramid views: binds the request/context/session, the
    authenticated user, and triggers the forced-password-change check.
    """

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, 'user'):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably of web-app context, e.g API CALL/VCS CALL
            if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
                log.warning('Unable to process request `%s` in this scope', request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        # DB-backed user instance (may be None for anonymous — see the
        # guard in _maybe_needs_password_change)
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user)

    def _maybe_needs_password_change(self, view_name, user_obj):
        """
        Redirect the user to the password-change page when the
        `force_password_change` flag on the user is set and expired,
        except on views where that would break the flow.
        """
        # views that must never trigger the check (health/streaming endpoints)
        dont_check_views = [
            'channelstream_connect',
            'ops_ping'
        ]
        if view_name in dont_check_views:
            return

        log.debug('Checking if user %s needs password change on view %s',
                  user_obj, view_name)

        # views where we only flash the warning but do not redirect,
        # so the user can actually change the password / log out
        skip_user_views = [
            'logout', 'login',
            'my_account_password', 'my_account_password_update'
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        # `force_password_change` stores a timestamp after which the
        # change is enforced
        should_change = user_obj.user_data.get('force_password_change')
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug('User %s requires password change', user_obj)
            h.flash('You are required to change your password', 'warning',
                    ignore_duplicate=True)

            if view_name not in skip_user_views:
                raise HTTPFound(
                    self.request.route_path('my_account_password'))

    def _log_creation_exception(self, e, repo_name):
        """
        Log a repository-creation exception and return a translated,
        user-presentable error message.
        """
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == 'INVALID_CERTIFICATE':
            log.exception(
                'Exception creating a repository: invalid certificate')
            msg = (_('Error creating repository %s: invalid certificate')
                   % repo_name)
        else:
            log.exception("Exception creating a repository")
            msg = (_('Error creating repository %s')
                   % repo_name)
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """
        Build the per-request template context `c`, including the
        delegated-admin creation permissions for the current user.
        """
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes
            attach_context_attributes(c, self.request, self.request.user.user_id)

        c.is_super_admin = c.auth_user.is_admin

        # super-admins can create everything by definition
        c.can_create_repo = c.is_super_admin
        c.can_create_repo_group = c.is_super_admin
        c.can_create_user_group = c.is_super_admin

        c.is_delegated_admin = False

        if not c.auth_user.is_default and not c.is_super_admin:
            c.can_create_repo = h.HasPermissionAny('hg.create.repository')(
                user=self.request.user)
            repositories = c.auth_user.repositories_admin or c.can_create_repo

            c.can_create_repo_group = h.HasPermissionAny('hg.repogroup.create.true')(
                user=self.request.user)
            repository_groups = c.auth_user.repository_groups_admin or c.can_create_repo_group

            c.can_create_user_group = h.HasPermissionAny('hg.usergroup.create.true')(
                user=self.request.user)
            user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
            # delegated admin can create, or manage some objects
            c.is_delegated_admin = repositories or repository_groups or user_groups

        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        """Wrap the context in the dict shape the renderer expects."""
        local_tmpl_args = {
            'defaults': {},
            'errors': {},
            'c': tmpl_args
        }
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError('Needs implementation in view class')
r1554 | ||||
class RepoAppView(BaseAppView):
    """
    Base class for repository-scoped views: binds the DB repository from the
    request and (lazily, in _get_local_tmpl_context) the vcs backend instance.
    """

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
        self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
        self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)

    def _handle_missing_requirements(self, error):
        # called when the vcs backend raised RepositoryRequirementError
        log.error(
            'Requirements are missing for repository %s: %s',
            self.db_repo_name, safe_str(error))

    def _prepare_and_set_clone_url(self, c):
        """
        Fill c.clone_repo_url / *_id / *_ssh from the clone-uri templates
        already present on the context; anonymous users get no username.
        """
        username = ''
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = self._rhodecode_user.username

        _def_clone_uri = c.clone_uri_tmpl
        _def_clone_uri_id = c.clone_uri_id_tmpl
        _def_clone_uri_ssh = c.clone_uri_ssh_tmpl

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri)
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_id)
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=_def_clone_uri_ssh, ssh=True)

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """
        Extend the base context with repository variables, instantiate the
        vcs backend, build the ACL path filter, and redirect away when the
        repository cannot be loaded from the filesystem.
        """
        _ = self.request.translate
        c = super()._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        c.repository_artifacts = self.db_repo_artifacts
        c.repository_is_user_following = ScmModel().is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id)
        # permissive default; replaced below once path permissions are known
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            # NOTE(marcink):
            # comparison to None since if it's an object __bool__ is expensive to
            # calculate
            if self.rhodecode_vcs_repo is not None:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username)
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {'error': str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            log.debug('Repository was not found on filesystem, check if it exists or is not damaged')
            h.flash(_(
                "The repository `%(repo_name)s` cannot be loaded in filesystem. "
                "Please check if it exist, or is not damaged.") %
                {'repo_name': c.repo_name},
                category='error', ignore_duplicate=True)
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(('edit_repo', 'repo_summary')):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=self.db_repo_name))

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path('home'))

        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                'repository.write', 'repository.read', 'repository.admin')(
                self.db_repo.fork.repo_name, 'summary fork link')

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get('f_path')
        if f_path:
            # fix for multiple initial slashes that causes errors for GIT
            return f_path.lstrip('/')

        return default

    def _get_f_path(self, matchdict, default=None):
        """Like _get_f_path_unchecked, but enforce ACL path permissions."""
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        # read a single key from the repo's general vcs settings
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _get_repo_setting(self, target_repo, settings_key, default=False):
        # read a single key from the repo's inherited settings
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_repo_settings_inherited()
        return settings.get(settings_key, default)

    def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path='/'):
        """
        Find and render the README for `db_repo` at `commit_id` (or the
        landing commit) under `path`. Results are cached per repository.

        :returns: (rendered_html_or_None, readme_filename_or_None)
        """
        log.debug('Looking for README file at path %s', path)
        if commit_id:
            landing_commit_id = commit_id
        else:
            landing_commit = db_repo.get_landing_commit()
            if isinstance(landing_commit, EmptyCommit):
                # empty repository: nothing to render
                return None, None
            landing_commit_id = landing_commit.raw_id

        cache_namespace_uid = f'repo.{db_repo.repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid, use_async_runner=True)
        start = time.time()

        # cached on (repo_id, commit_id, repo_name, path, renderer_type)
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type):
            readme_data = None
            readme_filename = None

            commit = db_repo.get_commit(_commit_id)
            log.debug("Searching for a README file at commit %s.", _commit_id)
            readme_node = ReadmeFinder(_renderer_type).search(commit, path=_readme_search_path)

            if readme_node:
                log.debug('Found README node: %s', readme_node)
                relative_urls = {
                    'raw': h.route_path(
                        'repo_file_raw', repo_name=_repo_name,
                        commit_id=commit.raw_id, f_path=readme_node.path),
                    'standard': h.route_path(
                        'repo_files', repo_name=_repo_name,
                        commit_id=commit.raw_id, f_path=readme_node.path),
                }

                readme_data = self._render_readme_or_none(commit, readme_node, relative_urls)
                readme_filename = readme_node.str_path

            return readme_data, readme_filename

        readme_data, readme_filename = generate_repo_readme(
            db_repo.repo_id, landing_commit_id, db_repo.repo_name, path, renderer_type,)

        compute_time = time.time() - start
        log.debug('Repo README for path %s generated and computed in %.4fs',
                  path, compute_time)
        return readme_data, readme_filename

    def _render_readme_or_none(self, commit, readme_node, relative_urls):
        """Render the README node to HTML; returns None on render failure."""
        log.debug('Found README file `%s` rendering...', readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.str_content, filename=readme_node.path)
            if relative_urls:
                return relative_links(html_source, relative_urls)
            return html_source
        except Exception:
            # best-effort: a broken README must not break the page
            log.exception("Exception while trying to render the README")

    def get_recache_flag(self):
        """True when the request carries any of the cache-busting GET flags."""
        for flag_name in ['force_recache', 'force-recache', 'no-cache']:
            flag_val = self.request.GET.get(flag_name)
            if str2bool(flag_val):
                return True
        return False

    # NOTE(review): takes `cls` but is not decorated with @classmethod —
    # callers appear to invoke it on instances; confirm whether the
    # decorator was intended.
    def get_commit_preload_attrs(cls):
        # commit attributes to pre-load when listing commits
        pre_load = ['author', 'branch', 'date', 'message', 'parents',
                    'obsolete', 'phase', 'hidden']
        return pre_load
r2618 | ||||
class PathFilter(object):
    """
    Applies per-path ACL filtering to file paths, patchsets and raw diffs.
    Wraps an instance of BasePathPermissionChecker, or None (no filtering).
    """

    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        """Return `path` when access is allowed, otherwise raise HTTPForbidden."""
        if not self.path_access_allowed(path):
            raise HTTPForbidden()
        return path

    def path_access_allowed(self, path):
        log.debug('Checking ACL permissions for PathFilter for `%s`', path)
        if not self.permission_checker:
            log.debug('ACL permissions checker not enabled, skipping...')
            return True

        has_access = path and self.permission_checker.has_access(path)
        log.debug('ACL Permissions checker enabled, ACL Check has_access: %s', has_access)
        return has_access

    def filter_patchset(self, patchset):
        """Return (visible patchset, True-if-anything-was-hidden)."""
        if not self.permission_checker or not patchset:
            return patchset, False

        visible = []
        dropped_any = False
        for patch in patchset:
            fname = patch.get('filename', None)
            # entries with no filename are always kept
            if fname and not self.permission_checker.has_access(fname):
                dropped_any = True
            else:
                visible.append(patch)

        if not dropped_any:
            return patchset, False

        # preserve the limited-diff wrapper metadata if present
        if isinstance(patchset, diffs.LimitedDiffContainer):
            visible = diffs.LimitedDiffContainer(
                patchset.diff_limit, patchset.cur_diff_size, visible)
        return visible, True

    def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
        """Render a patchset after ACL filtering, flagging hidden changes."""
        visible, has_hidden_changes = self.filter_patchset(patchset)
        rendered = diffset.render_patchset(
            visible, source_ref=source_ref, target_ref=target_ref)
        rendered.has_hidden_changes = has_hidden_changes
        return rendered

    def get_raw_patch(self, diff_processor):
        """Raw patch output, unless user-specific path filters are active."""
        if self.permission_checker is None or self.permission_checker.has_full_access:
            return diff_processor.as_raw()
        return '# Repository has user-specific filters, raw patch generation is disabled.'

    @property
    def is_enabled(self):
        return self.permission_checker is not None
r1554 | ||||
r1956 | ||||
class RepoGroupAppView(BaseAppView):
    """Base class for repository-group scoped views."""

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)
        c.repo_group = self.db_repo_group
        return c

    def _revoke_perms_on_yourself(self, form_result):
        """
        Detect whether the submitted permission changes would strip the
        current user of `group.admin` rights on this repository group.
        """
        my_id = self._rhodecode_user.user_id

        def _own_entries(entries):
            # entries are (user_id, permission, ...) tuples
            return [entry for entry in entries if int(entry[0]) == my_id]

        own_updates = _own_entries(form_result['perm_updates'])
        own_additions = _own_entries(form_result['perm_additions'])
        own_deletions = _own_entries(form_result['perm_deletions'])

        admin_perm = 'group.admin'
        for own in (own_updates, own_additions, own_deletions):
            if own and own[0][1] != admin_perm:
                return True
        return False
r1989 | ||||
class UserGroupAppView(BaseAppView):
    """Base class for user-group scoped views; binds the group from the request."""

    def __init__(self, context, request):
        super().__init__(context, request)
        group = request.db_user_group
        self.db_user_group = group
        self.db_user_group_name = group.users_group_name
class UserAppView(BaseAppView):
    """
    Base class for user-admin views; binds the target user from the request
    and refuses to edit the default (anonymous) user where unsupported.
    """

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        if (not request.db_user_supports_default
                and self.db_user.username == User.DEFAULT_USER):
            h.flash(_("Editing user `{}` is disabled.".format(
                User.DEFAULT_USER)), category='warning')
            raise HTTPFound(h.route_path('users'))
class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        """Read DataTables ordering parameters from the request GET args."""
        column_map = column_map or {}
        get_arg = request.GET.get

        column_index = safe_int(get_arg('order[0][column]'))
        order_dir = get_arg('order[0][dir]', 'desc')
        order_by = get_arg(f'columns[{column_index}][data][sort]', 'name_raw')

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = get_arg('search[value]')
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        """Read DataTables paging parameters (draw/start/length)."""
        params = request.GET
        draw = safe_int(params.get('draw'))
        start = safe_int(params.get('start'), 0)
        length = safe_int(params.get('length'), 25)
        return draw, start, length

    def _get_order_col(self, order_by, model):
        """Resolve a string key into a `model` attribute; pass through otherwise."""
        if not isinstance(order_by, str):
            return order_by
        try:
            return operator.attrgetter(order_by)(model)
        except AttributeError:
            return None
r1646 | ||||
class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        return c

    def load_refs_context(self, ref_items, partials_template):
        """
        Build the grid rows for a list of (ref_name, commit_id) pairs using
        the given partials template for HTML cell rendering.
        """
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message", "parents"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        # only mercurial has the notion of closed branches
        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load)
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            # refs with '/' in the name (and all svn refs) must link by
            # commit id, since the name is not a valid single URL segment
            use_commit_id = '/' in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=commit_id,
                    _query=dict(at=ref_name)
                )

            else:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=ref_name,
                    _query=dict(at=ref_name)
                )

            data.append({
                "name": _render('name', ref_name, files_url, closed),
                "name_raw": ref_name,
                "date": _render('date', commit.date),
                "date_raw": datetime_to_time(commit.date),
                "author": _render('author', commit.author),
                "commit": _render(
                    'commit', commit.message, commit.raw_id, commit.idx),
                "commit_raw": commit.idx,
                "compare": _render(
                    'compare', format_ref_id(ref_name, commit.raw_id)),
            })

        return data
r1746 | ||||
class RepoRoutePredicate(object):
    """
    Route predicate that resolves `repo_name` from the matchdict into a
    database Repository, attaching it to the request, and redirects to the
    "creating" page for repositories still in pending state.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f'repo_route = {self.val}'

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        repo_name = info['match']['repo_name']

        repo_name_parts = repo_name.split('/')
        # FIX: dropped the no-op wrapper comprehension
        # `[x for x in (repo_name_slug(x) ...)]` — build the list directly.
        repo_slugs = [repo_name_slug(x) for x in repo_name_parts]

        if repo_name_parts != repo_slugs:
            # short-skip if the repo-name doesn't follow slug rule
            log.warning('repo_name: %s is different than slug %s', repo_name_parts, repo_slugs)
            return False

        repo_model = repo.RepoModel()

        by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)

        def redirect_if_creating(route_info, db_repo):
            skip_views = ['edit_repo_advanced_delete']
            route = route_info['route']
            # we should skip delete view so we can actually "remove" repositories
            # if they get stuck in creating state.
            if route.name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                repo_creating_url = request.route_path(
                    'repo_creating', repo_name=db_repo.repo_name)
                raise HTTPFound(repo_creating_url)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo = by_name_match
            request.db_repo_name = request.db_repo.repo_name

            redirect_if_creating(info, by_name_match)
            return True

        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            request.db_repo = by_id_match
            request.db_repo_name = request.db_repo.repo_name
            redirect_if_creating(info, by_id_match)
            return True

        return False
class RepoForbidArchivedRoutePredicate(object):
    """Route predicate that blocks modifying views on archived repositories."""

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f'repo_forbid_archived = {self.val}'

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        db_repo = request.db_repo
        log.debug(
            '%s checking if archived flag for repo for %s',
            self.__class__.__name__, db_repo.repo_name)

        if not db_repo.archived:
            return True

        # archived repos are read-only: warn and bounce to the summary page
        log.warning('Current view is not supported for archived repo:%s',
                    db_repo.repo_name)
        h.flash(
            h.literal(_('Action not supported for archived repository.')),
            category='warning')
        summary_url = request.route_path(
            'repo_summary', repo_name=db_repo.repo_name)
        raise HTTPFound(summary_url)
class RepoTypeRoutePredicate(object):
    """Route predicate restricting a route to certain repository backends."""

    def __init__(self, val, config):
        # default to all supported backends when no restriction is given
        self.val = val if val else ['hg', 'git', 'svn']

    def text(self):
        return f'repo_accepted_type = {self.val}'

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # vcs requests are not subject to this predicate
            return

        db_repo = request.db_repo
        log.debug(
            '%s checking repo type for %s in %s',
            self.__class__.__name__, db_repo.repo_type, self.val)

        if db_repo.repo_type not in self.val:
            log.warning('Current view is not supported for repo type:%s',
                        db_repo.repo_type)
            return False
        return True
class RepoGroupRoutePredicate(object):
    """
    Route predicate that resolves `repo_group_name` from the matchdict into
    a database RepoGroup and attaches it to the request.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f'repo_group_route = {self.val}'

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        repo_group_name = info['match']['repo_group_name']

        repo_group_name_parts = repo_group_name.split('/')
        # FIX: dropped the no-op outer comprehension `[x for x in [...]]`
        # that merely copied the slug list.
        repo_group_slugs = [repo_name_slug(x) for x in repo_group_name_parts]
        if repo_group_name_parts != repo_group_slugs:
            # short-skip if the group name doesn't follow slug rule
            log.warning('repo_group_name: %s is different than slug %s',
                        repo_group_name_parts, repo_group_slugs)
            return False

        repo_group_model = repo_group.RepoGroupModel()
        by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo_group = by_name_match
            request.db_repo_group_name = request.db_repo_group.group_name
            return True

        return False
r1766 | ||||
class UserGroupRoutePredicate(object):
    """Route predicate resolving `user_group_id` into request.db_user_group."""

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f'user_group_route = {self.val}'

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # vcs requests are not subject to this predicate
            return

        user_group_id = info['match']['user_group_id']
        match = user_group.UserGroup().get(user_group_id, cache=False)
        if not match:
            return False

        # register this as request object we can re-use later
        request.db_user_group = match
        return True
class UserRoutePredicateBase(object):
    """
    Shared implementation for user route predicates: resolves `user_id`
    from the matchdict into request.db_user. Subclasses set
    `supports_default` and provide `text`.
    """
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # vcs requests are not subject to this predicate
            return

        user_id = info['match']['user_id']
        match = user.User().get(user_id, cache=False)
        if not match:
            return False

        # register this as request object we can re-use later
        request.db_user = match
        request.db_user_supports_default = self.supports_default
        return True
class UserRoutePredicate(UserRoutePredicateBase):
    """User route predicate; the default (anonymous) user is not accepted."""
    supports_default = False

    def text(self):
        return f'user_route = {self.val}'

    phash = text
class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    """User route predicate that also accepts the default (anonymous) user."""
    supports_default = True

    def text(self):
        return f'user_with_default_route = {self.val}'

    phash = text
def includeme(config):
    """Register this module's custom route predicates with pyramid."""
    predicates = (
        ('repo_route', RepoRoutePredicate),
        ('repo_accepted_types', RepoTypeRoutePredicate),
        ('repo_forbid_when_archived', RepoForbidArchivedRoutePredicate),
        ('repo_group_route', RepoGroupRoutePredicate),
        ('user_group_route', UserGroupRoutePredicate),
        ('user_route_with_default', UserRouteWithDefaultPredicate),
        ('user_route', UserRoutePredicate),
    )
    for name, predicate in predicates:
        config.add_route_predicate(name, predicate)