views.py
856 lines
| 30.3 KiB
| text/x-python
|
PythonLexer
r5053 | ||||
r1666 | ||||
r4306 | # Copyright (C) 2016-2020 RhodeCode GmbH | |||
r1666 | # | |||
# This program is free software: you can redistribute it and/or modify | ||||
# it under the terms of the GNU Affero General Public License, version 3 | ||||
# (only), as published by the Free Software Foundation. | ||||
# | ||||
# This program is distributed in the hope that it will be useful, | ||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
# | ||||
# You should have received a copy of the GNU Affero General Public License | ||||
# along with this program. If not, see <http://www.gnu.org/licenses/>. | ||||
# | ||||
# This program is dual-licensed. If you wish to learn more about the | ||||
# RhodeCode Enterprise Edition, including its added features, Support services, | ||||
# and proprietary license terms, please see https://rhodecode.com/licenses/ | ||||
r1668 | import re | |||
r1666 | import logging | |||
r2774 | import collections | |||
r1666 | ||||
r4148 | from pyramid.httpexceptions import HTTPNotFound | |||
r1666 | ||||
r4148 | from rhodecode.apps._base import BaseAppView, DataGridAppView | |||
r1667 | from rhodecode.lib import helpers as h | |||
r2080 | from rhodecode.lib.auth import ( | |||
r4148 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired, | |||
r4151 | HasRepoGroupPermissionAny, AuthUser) | |||
r3754 | from rhodecode.lib.codeblocks import filenode_as_lines_tokens | |||
r1668 | from rhodecode.lib.index import searcher_from_config | |||
r5065 | from rhodecode.lib.utils2 import str2bool, safe_int, safe_str | |||
r3754 | from rhodecode.lib.vcs.nodes import FileNode | |||
r2038 | from rhodecode.model.db import ( | |||
r4329 | func, true, or_, case, cast, in_filter_generator, String, Session, | |||
Repository, RepoGroup, User, UserGroup, PullRequest) | ||||
r1666 | from rhodecode.model.repo import RepoModel | |||
r1774 | from rhodecode.model.repo_group import RepoGroupModel | |||
r1677 | from rhodecode.model.user import UserModel | |||
r1676 | from rhodecode.model.user_group import UserGroupModel | |||
r1666 | ||||
log = logging.getLogger(__name__) | ||||
class HomeView(BaseAppView, DataGridAppView):
    """View class for the home/dashboard pages: autocomplete JSON feeds,
    the goto-switcher search, the main-page data grids, and small
    preview/session helper endpooints are all served from here."""

    def load_default_context(self):
        # Build the per-request template context and attach the DB
        # instance of the currently authenticated user.
        c = self._get_local_tmpl_context()
        c.user = c.auth_user.get_instance()
        return c
@LoginRequired() | ||||
def user_autocomplete_data(self): | ||||
r2308 | self.load_default_context() | |||
r1666 | query = self.request.GET.get('query') | |||
active = str2bool(self.request.GET.get('active') or True) | ||||
include_groups = str2bool(self.request.GET.get('user_groups')) | ||||
r1678 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) | |||
r1768 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) | |||
r1666 | ||||
log.debug('generating user list, query:%s, active:%s, with_groups:%s', | ||||
query, active, include_groups) | ||||
r1677 | _users = UserModel().get_users( | |||
r1666 | name_contains=query, only_active=active) | |||
r1768 | def maybe_skip_default_user(usr): | |||
if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: | ||||
return False | ||||
return True | ||||
_users = filter(maybe_skip_default_user, _users) | ||||
r1666 | if include_groups: | |||
# extend with user groups | ||||
r1676 | _user_groups = UserGroupModel().get_user_groups( | |||
r1678 | name_contains=query, only_active=active, | |||
expand_groups=expand_groups) | ||||
r1666 | _users = _users + _user_groups | |||
return {'suggestions': _users} | ||||
@LoginRequired() | ||||
@NotAnonymous() | ||||
def user_group_autocomplete_data(self): | ||||
r2308 | self.load_default_context() | |||
r1666 | query = self.request.GET.get('query') | |||
active = str2bool(self.request.GET.get('active') or True) | ||||
r1678 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) | |||
r1666 | log.debug('generating user group list, query:%s, active:%s', | |||
query, active) | ||||
r1676 | _user_groups = UserGroupModel().get_user_groups( | |||
r1678 | name_contains=query, only_active=active, | |||
expand_groups=expand_groups) | ||||
r1666 | _user_groups = _user_groups | |||
return {'suggestions': _user_groups} | ||||
r1667 | ||||
    def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20):
        """Return serialized repositories matching ``name_contains`` that the
        current user is allowed to read (non-archived only), ordered so that
        entries under ``repo_group_name`` tend to sort first, then by name
        length and name.

        :returns: list of suggestion dicts for the goto-switcher/select2 UI
        """
        org_query = name_contains
        # ACL pre-filter: repo ids the user can at least read; [-1] forces an
        # always-empty IN() when nothing is allowed.
        allowed_ids = self._rhodecode_user.repo_acl_ids(
            ['repository.read', 'repository.write', 'repository.admin'],
            cache=True, name_filter=name_contains) or [-1]

        query = Session().query(
            Repository.repo_name,
            Repository.repo_id,
            Repository.repo_type,
            Repository.private,
        )\
            .filter(Repository.archived.isnot(true()))\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids)
            ))

        # prefer repos under the current repo-group context
        # NOTE(review): CASE with no ELSE yields NULL for non-matches; how
        # NULL sorts relative to the match value is backend-dependent — confirm
        query = query.order_by(case(
            [
                (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        query = query.order_by(func.length(Repository.repo_name))
        query = query.order_by(Repository.repo_name)

        if repo_type:
            query = query.filter(Repository.repo_type == repo_type)

        if name_contains:
            # case-insensitive substring match; limit only applies when filtering
            ilike_expression = '%{}%'.format(safe_str(name_contains))
            query = query.filter(
                Repository.repo_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.repo_name,
                'value': org_query,
                'value_display': obj.repo_name,
                'text': obj.repo_name,
                'type': 'repo',
                'repo_id': obj.repo_id,
                'repo_type': obj.repo_type,
                'private': obj.private,
                'url': h.route_path('repo_summary', repo_name=obj.repo_name)
            }
            for obj in acl_iter]
r1667 | ||||
    def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20):
        """Return serialized repository groups matching ``name_contains``
        that the current user is allowed to read, preferring groups under
        the ``repo_group_name`` context, then shorter/alphabetical names.

        :returns: list of suggestion dicts for the goto-switcher/select2 UI
        """
        org_query = name_contains
        # ACL pre-filter: group ids the user can at least read; [-1] forces
        # an always-empty IN() when nothing is allowed.
        allowed_ids = self._rhodecode_user.repo_group_acl_ids(
            ['group.read', 'group.write', 'group.admin'],
            cache=True, name_filter=name_contains) or [-1]

        query = Session().query(
            RepoGroup.group_id,
            RepoGroup.group_name,
        )\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(RepoGroup.group_id, allowed_ids)
            ))

        # prefer groups under the current repo-group context
        query = query.order_by(case(
            [
                (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        query = query.order_by(func.length(RepoGroup.group_name))
        query = query.order_by(RepoGroup.group_name)

        if name_contains:
            # case-insensitive substring match; limit only applies when filtering
            ilike_expression = u'%{}%'.format(safe_str(name_contains))
            query = query.filter(
                RepoGroup.group_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.group_name,
                'value': org_query,
                'value_display': obj.group_name,
                'text': obj.group_name,
                'type': 'repo_group',
                'repo_group_id': obj.group_id,
                'url': h.route_path(
                    'repo_group_home', repo_group_name=obj.group_name)
            }
            for obj in acl_iter]
    def _get_user_list(self, name_contains=None, limit=20):
        """Resolve a ``user: <term>`` prefixed query into serialized users.

        :returns: tuple ``(results, prefix_match)``; ``prefix_match`` is
            True once the ``user:`` prefix was recognized, even when no
            user actually matched.
        """
        org_query = name_contains
        if not name_contains:
            return [], False

        # TODO(marcink): should all logged in users be allowed to search others?
        allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
        if not allowed_user_search:
            return [], False

        # extract the search term after the 'user:' prefix; anything else
        # (no prefix, or an empty term) is not a prefix match
        name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains)
        if len(name_contains) != 1:
            return [], False

        name_contains = name_contains[0]

        # shortest usernames first, default (anonymous) user excluded
        query = User.query()\
            .order_by(func.length(User.username))\
            .order_by(User.username) \
            .filter(User.username != User.DEFAULT_USER)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_str(name_contains))
            query = query.filter(
                User.username.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.user_id,
                'value': org_query,
                'value_display': 'user: `{}`'.format(obj.username),
                'type': 'user',
                'icon_link': h.gravatar_url(obj.email, 30),
                'url': h.route_path(
                    'user_profile', username=obj.username)
            }
            for obj in acl_iter], True
r2774 | ||||
    def _get_user_groups_list(self, name_contains=None, limit=20):
        """Resolve a ``user_group: <term>`` prefixed query into serialized
        user groups.

        :returns: tuple ``(results, prefix_match)``; ``prefix_match`` is
            True once the ``user_group:`` prefix was recognized, even when
            nothing matched.
        """
        org_query = name_contains
        if not name_contains:
            return [], False

        # TODO(marcink): should all logged in users be allowed to search others?
        allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
        if not allowed_user_search:
            return [], False

        # extract the search term after the 'user_group:' prefix
        name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains)
        if len(name_contains) != 1:
            return [], False

        name_contains = name_contains[0]

        # shortest group names first
        query = UserGroup.query()\
            .order_by(func.length(UserGroup.users_group_name))\
            .order_by(UserGroup.users_group_name)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_str(name_contains))
            query = query.filter(
                UserGroup.users_group_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.users_group_id,
                'value': org_query,
                'value_display': 'user_group: `{}`'.format(obj.users_group_name),
                'type': 'user_group',
                'url': h.route_path(
                    'user_group_profile', user_group_name=obj.users_group_name)
            }
            for obj in acl_iter], True
r2795 | ||||
    def _get_pull_request_list(self, name_contains=None, limit=20):
        """Resolve a ``pr: <term>`` prefixed query into serialized pull
        requests whose target repository the user may read. The term is
        matched against PR id, title, and description.

        :returns: tuple ``(results, prefix_match)``; ``prefix_match`` is
            True once the ``pr:`` prefix was recognized, even when nothing
            matched.
        """
        org_query = name_contains
        if not name_contains:
            return [], False

        # TODO(marcink): should all logged in users be allowed to search others?
        allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
        if not allowed_user_search:
            return [], False

        # extract the search term after the 'pr:' prefix
        name_contains = re.compile('(?:pr:[ ]?)(.+)').findall(name_contains)
        if len(name_contains) != 1:
            return [], False

        name_contains = name_contains[0]

        # ACL pre-filter on the PR's *target* repository
        allowed_ids = self._rhodecode_user.repo_acl_ids(
            ['repository.read', 'repository.write', 'repository.admin'],
            cache=True) or [-1]

        query = Session().query(
            PullRequest.pull_request_id,
            PullRequest.title,
        )
        query = query.join(Repository, Repository.repo_id == PullRequest.target_repo_id)

        query = query.filter(or_(
            # generate multiple IN to fix limitation problems
            *in_filter_generator(Repository.repo_id, allowed_ids)
        ))

        query = query.order_by(PullRequest.pull_request_id)

        if name_contains:
            # match id (cast to text), title, or description
            ilike_expression = u'%{}%'.format(safe_str(name_contains))
            query = query.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(ilike_expression),
                PullRequest.title.ilike(ilike_expression),
                PullRequest.description.ilike(ilike_expression),
            ))

            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.pull_request_id,
                'value': org_query,
                'value_display': 'pull request: `!{} - {}`'.format(
                    obj.pull_request_id, safe_str(obj.title[:50])),
                'type': 'pull_request',
                'url': h.route_path('pull_requests_global', pull_request_id=obj.pull_request_id)
            }
            for obj in acl_iter], True
    def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None):
        """Resolve a ``commit: <hash>`` prefixed query via the full-text
        search index, optionally scoped to a repo or repo group.

        :returns: tuple ``(results, prefix_match)``; ``prefix_match`` is
            True once the ``commit:`` prefix with a plausible hash was
            recognized.
        """
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        # require at least 3 chars and a configured searcher
        if not query or len(query) < 3 or not searcher:
            return [], False

        # extract a 2-40 char hex hash after the 'commit:' prefix
        commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query)

        if len(commit_hashes) != 1:
            return [], False

        commit_hash = commit_hashes[0]

        # prefix search on commit_id, filtered by the user's ACLs inside
        # the searcher; errors are swallowed (best-effort suggestions)
        result = searcher.search(
            'commit_id:{}*'.format(commit_hash), 'commit', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        commits = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            commit_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` commit: {}'.format(
                    entry['repository'], entry['commit_id']),
                'type': 'commit',
                'repo': entry['repository'],
                'repo_data': repo_data,

                'url': h.route_path(
                    'repo_commit',
                    repo_name=entry['repository'], commit_id=entry['commit_id'])
            }

            commits.append(commit_entry)
        return commits, True
r1668 | ||||
    def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None):
        """Resolve a ``file: <path>`` prefixed query via the full-text
        search index, optionally scoped to a repo or repo group.

        :returns: tuple ``(results, prefix_match)``; ``prefix_match`` is
            True once the ``file:`` prefix was recognized.
        """
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        # require at least 3 chars and a configured searcher
        if not query or len(query) < 3 or not searcher:
            return [], False

        # extract the path term after the 'file:' prefix
        paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query)
        if len(paths_re) != 1:
            return [], False

        file_path = paths_re[0]
        # escape characters that are special to the search backend
        search_path = searcher.escape_specials(file_path)
        # substring search on the raw file path; ACL filtering happens in
        # the searcher; errors are swallowed (best-effort suggestions)
        result = searcher.search(
            'file.raw:*{}*'.format(search_path), 'path', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        files = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            file_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` file: {}'.format(
                    entry['repository'], entry['file']),
                'type': 'file',
                'repo': entry['repository'],
                'repo_data': repo_data,

                'url': h.route_path(
                    'repo_files',
                    repo_name=entry['repository'], commit_id=entry['commit_id'],
                    f_path=entry['file'])
            }

            files.append(file_entry)
        return files, True
r3542 | ||||
r1667 | @LoginRequired() | |||
def repo_list_data(self): | ||||
_ = self.request.translate | ||||
r2308 | self.load_default_context() | |||
r1667 | ||||
query = self.request.GET.get('query') | ||||
repo_type = self.request.GET.get('repo_type') | ||||
log.debug('generating repo list, query:%s, repo_type:%s', | ||||
query, repo_type) | ||||
res = [] | ||||
repos = self._get_repo_list(query, repo_type=repo_type) | ||||
if repos: | ||||
res.append({ | ||||
'text': _('Repositories'), | ||||
'children': repos | ||||
}) | ||||
data = { | ||||
'more': False, | ||||
'results': res | ||||
} | ||||
return data | ||||
r1668 | ||||
r3424 | @LoginRequired() | |||
def repo_group_list_data(self): | ||||
_ = self.request.translate | ||||
self.load_default_context() | ||||
query = self.request.GET.get('query') | ||||
log.debug('generating repo group list, query:%s', | ||||
query) | ||||
res = [] | ||||
repo_groups = self._get_repo_group_list(query) | ||||
if repo_groups: | ||||
res.append({ | ||||
'text': _('Repository Groups'), | ||||
'children': repo_groups | ||||
}) | ||||
data = { | ||||
'more': False, | ||||
'results': res | ||||
} | ||||
return data | ||||
    def _get_default_search_queries(self, search_context, searcher, query):
        """Build canned full-text-search suggestion entries for *query*.

        Depending on the search context (repo page, repo-group page, or
        global) this produces 'file content search' / 'commit search'
        entries that deep-link into the search UI. Context-scoped entries
        require Elasticsearch 6; without a searcher nothing is suggested.

        :param search_context: mapping with ``search_context[...]`` keys
            (typically ``self.request.GET``)
        :returns: list of suggestion dicts
        """
        if not searcher:
            return []

        is_es_6 = searcher.is_es_6
        queries = []
        repo_group_name, repo_name, repo_context = None, None, None

        # repo group context
        if search_context.get('search_context[repo_group_name]'):
            repo_group_name = search_context.get('search_context[repo_group_name]')
        # repo context (view type decides commit-vs-file ordering below)
        if search_context.get('search_context[repo_name]'):
            repo_name = search_context.get('search_context[repo_name]')
            repo_context = search_context.get('search_context[repo_view_type]')

        if is_es_6 and repo_name:
            # files
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'content'}

            label = u'File content search for `{}`'.format(h.escape(query))
            file_qry = {
                'id': -10,
                'value': query,
                'value_display': label,
                'value_icon': '<i class="icon-code"></i>',
                'type': 'search',
                'subtype': 'repo',
                'url': h.route_path('search_repo',
                                    repo_name=repo_name,
                                    _query=query_modifier())
                }

            # commits
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'commit'}

            label = u'Commit search for `{}`'.format(h.escape(query))
            commit_qry = {
                'id': -20,
                'value': query,
                'value_display': label,
                'value_icon': '<i class="icon-history"></i>',
                'type': 'search',
                'subtype': 'repo',
                'url': h.route_path('search_repo',
                                    repo_name=repo_name,
                                    _query=query_modifier())
                }

            # order suggestions by what the user is currently looking at
            if repo_context in ['commit', 'commits']:
                queries.extend([commit_qry, file_qry])
            elif repo_context in ['files', 'summary']:
                queries.extend([file_qry, commit_qry])
            else:
                queries.extend([commit_qry, file_qry])

        elif is_es_6 and repo_group_name:
            # files
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'content'}

            # NOTE(review): unlike the repo branch above, these labels do
            # not h.escape() the query — confirm whether that is intended
            label = u'File content search for `{}`'.format(query)
            file_qry = {
                'id': -30,
                'value': query,
                'value_display': label,
                'value_icon': '<i class="icon-code"></i>',
                'type': 'search',
                'subtype': 'repo_group',
                'url': h.route_path('search_repo_group',
                                    repo_group_name=repo_group_name,
                                    _query=query_modifier())
                }

            # commits
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'commit'}

            label = u'Commit search for `{}`'.format(query)
            commit_qry = {
                'id': -40,
                'value': query,
                'value_display': label,
                'value_icon': '<i class="icon-history"></i>',
                'type': 'search',
                'subtype': 'repo_group',
                'url': h.route_path('search_repo_group',
                                    repo_group_name=repo_group_name,
                                    _query=query_modifier())
                }

            if repo_context in ['commit', 'commits']:
                queries.extend([commit_qry, file_qry])
            elif repo_context in ['files', 'summary']:
                queries.extend([file_qry, commit_qry])
            else:
                queries.extend([commit_qry, file_qry])

        # Global, not scoped
        if not queries:
            queries.append(
                {
                    'id': -1,
                    'value': query,
                    'value_display': u'File content search for: `{}`'.format(query),
                    'value_icon': '<i class="icon-code"></i>',
                    'type': 'search',
                    'subtype': 'global',
                    'url': h.route_path('search',
                                        _query={'q': query, 'type': 'content'})
                })
            queries.append(
                {
                    'id': -2,
                    'value': query,
                    'value_display': u'Commit search for: `{}`'.format(query),
                    'value_icon': '<i class="icon-history"></i>',
                    'type': 'search',
                    'subtype': 'global',
                    'url': h.route_path('search',
                                        _query={'q': query, 'type': 'commit'})
                })

        return queries
    @LoginRequired()
    def goto_switcher_data(self):
        """JSON feed backing the main goto/search switcher.

        The query may carry a typed prefix (``user:``, ``user_group:``,
        ``pr:``, ``commit:``, ``file:``) which triggers exactly one
        specialized search; without a prefix, default FTS suggestions plus
        matching repo groups and repositories are returned. The optional
        ``search_context[repo_id]`` / ``search_context[repo_group_id]``
        GET params scope FTS lookups and ordering.

        :returns: dict ``{'suggestions': [...]}``
        """
        c = self.load_default_context()

        _ = self.request.translate

        query = self.request.GET.get('query')
        log.debug('generating main filter data, query %s', query)

        res = []
        if not query:
            return {'suggestions': res}

        def no_match(name):
            # placeholder entry shown when a prefix matched but nothing was found
            return {
                'id': -1,
                'value': "",
                'value_display': name,
                'type': 'text',
                'url': ""
            }
        searcher = searcher_from_config(self.request.registry.settings)
        has_specialized_search = False

        # set repo context
        repo = None
        repo_id = safe_int(self.request.GET.get('search_context[repo_id]'))
        if repo_id:
            repo = Repository.get(repo_id)

        # set group context
        repo_group = None
        repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]'))
        if repo_group_id:
            repo_group = RepoGroup.get(repo_group_id)
        prefix_match = False

        # user: type search
        if not prefix_match:
            users, prefix_match = self._get_user_list(query)
            if users:
                has_specialized_search = True
                for serialized_user in users:
                    res.append(serialized_user)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching users found'))

        # user_group: type search
        if not prefix_match:
            user_groups, prefix_match = self._get_user_groups_list(query)
            if user_groups:
                has_specialized_search = True
                for serialized_user_group in user_groups:
                    res.append(serialized_user_group)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching user groups found'))

        # pr: type search
        if not prefix_match:
            pull_requests, prefix_match = self._get_pull_request_list(query)
            if pull_requests:
                has_specialized_search = True
                for serialized_pull_request in pull_requests:
                    res.append(serialized_pull_request)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching pull requests found'))

        # FTS commit: type search
        if not prefix_match:
            commits, prefix_match = self._get_hash_commit_list(
                c.auth_user, searcher, query, repo, repo_group)
            if commits:
                has_specialized_search = True
                # group results per repository, preserving result order
                unique_repos = collections.OrderedDict()
                for commit in commits:
                    repo_name = commit['repo']
                    unique_repos.setdefault(repo_name, []).append(commit)

                for _repo, commits in unique_repos.items():
                    for commit in commits:
                        res.append(commit)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching commits found'))

        # FTS file: type search
        if not prefix_match:
            paths, prefix_match = self._get_path_list(
                c.auth_user, searcher, query, repo, repo_group)
            if paths:
                has_specialized_search = True
                # group results per repository, preserving result order
                unique_repos = collections.OrderedDict()
                for path in paths:
                    repo_name = path['repo']
                    unique_repos.setdefault(repo_name, []).append(path)

                for repo, paths in unique_repos.items():
                    for path in paths:
                        res.append(path)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching files found'))

        # main suggestions (only when no prefix search was triggered)
        if not has_specialized_search:
            repo_group_name = ''
            if repo_group:
                repo_group_name = repo_group.group_name

            for _q in self._get_default_search_queries(self.request.GET, searcher, query):
                res.append(_q)

            repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name)
            for serialized_repo_group in repo_groups:
                res.append(serialized_repo_group)

            repos = self._get_repo_list(query, repo_group_name=repo_group_name)
            for serialized_repo in repos:
                res.append(serialized_repo)

            if not repos and not repo_groups:
                res.append(no_match('No matches found'))

        return {'suggestions': res}
r1774 | ||||
@LoginRequired() | ||||
def main_page(self): | ||||
c = self.load_default_context() | ||||
c.repo_group = None | ||||
r4148 | return self._get_template_context(c) | |||
r1774 | ||||
r4148 | def _main_page_repo_groups_data(self, repo_group_id): | |||
column_map = { | ||||
r4150 | 'name': 'group_name_hash', | |||
r4148 | 'desc': 'group_description', | |||
r4150 | 'last_change': 'updated_on', | |||
r4148 | 'owner': 'user_username', | |||
} | ||||
draw, start, limit = self._extract_chunk(self.request) | ||||
search_q, order_by, order_dir = self._extract_ordering( | ||||
self.request, column_map=column_map) | ||||
return RepoGroupModel().get_repo_groups_data_table( | ||||
draw, start, limit, | ||||
search_q, order_by, order_dir, | ||||
self._rhodecode_user, repo_group_id) | ||||
def _main_page_repos_data(self, repo_group_id): | ||||
column_map = { | ||||
r4150 | 'name': 'repo_name', | |||
r4148 | 'desc': 'description', | |||
r4150 | 'last_change': 'updated_on', | |||
r4148 | 'owner': 'user_username', | |||
} | ||||
draw, start, limit = self._extract_chunk(self.request) | ||||
search_q, order_by, order_dir = self._extract_ordering( | ||||
self.request, column_map=column_map) | ||||
return RepoModel().get_repos_data_table( | ||||
draw, start, limit, | ||||
search_q, order_by, order_dir, | ||||
self._rhodecode_user, repo_group_id) | ||||
r1774 | ||||
r4148 | @LoginRequired() | |||
def main_page_repo_groups_data(self): | ||||
self.load_default_context() | ||||
repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | ||||
if repo_group_id: | ||||
group = RepoGroup.get_or_404(repo_group_id) | ||||
r4151 | _perms = AuthUser.repo_group_read_perms | |||
r4148 | if not HasRepoGroupPermissionAny(*_perms)( | |||
group.group_name, 'user is allowed to list repo group children'): | ||||
raise HTTPNotFound() | ||||
return self._main_page_repo_groups_data(repo_group_id) | ||||
@LoginRequired() | ||||
def main_page_repos_data(self): | ||||
self.load_default_context() | ||||
repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | ||||
if repo_group_id: | ||||
group = RepoGroup.get_or_404(repo_group_id) | ||||
r4151 | _perms = AuthUser.repo_group_read_perms | |||
r4148 | if not HasRepoGroupPermissionAny(*_perms)( | |||
group.group_name, 'user is allowed to list repo group children'): | ||||
raise HTTPNotFound() | ||||
return self._main_page_repos_data(repo_group_id) | ||||
r1774 | ||||
@LoginRequired() | ||||
r4151 | @HasRepoGroupPermissionAnyDecorator(*AuthUser.repo_group_read_perms) | |||
r1774 | def repo_group_main_page(self): | |||
c = self.load_default_context() | ||||
c.repo_group = self.request.db_repo_group | ||||
return self._get_template_context(c) | ||||
r2816 | ||||
@LoginRequired() | ||||
@CSRFRequired() | ||||
def markup_preview(self): | ||||
# Technically a CSRF token is not needed as no state changes with this | ||||
# call. However, as this is a POST is better to have it, so automated | ||||
# tools don't flag it as potential CSRF. | ||||
# Post is required because the payload could be bigger than the maximum | ||||
# allowed by GET. | ||||
text = self.request.POST.get('text') | ||||
renderer = self.request.POST.get('renderer') or 'rst' | ||||
if text: | ||||
return h.render(text, renderer=renderer, mentions=True) | ||||
return '' | ||||
r3088 | ||||
@LoginRequired() | ||||
@CSRFRequired() | ||||
r3754 | def file_preview(self): | |||
# Technically a CSRF token is not needed as no state changes with this | ||||
# call. However, as this is a POST is better to have it, so automated | ||||
# tools don't flag it as potential CSRF. | ||||
# Post is required because the payload could be bigger than the maximum | ||||
# allowed by GET. | ||||
text = self.request.POST.get('text') | ||||
file_path = self.request.POST.get('file_path') | ||||
renderer = h.renderer_from_filename(file_path) | ||||
if renderer: | ||||
return h.render(text, renderer=renderer, mentions=True) | ||||
else: | ||||
self.load_default_context() | ||||
_render = self.request.get_partial_renderer( | ||||
'rhodecode:templates/files/file_content.mako') | ||||
lines = filenode_as_lines_tokens(FileNode(file_path, text)) | ||||
return _render('render_lines', lines) | ||||
@LoginRequired() | ||||
@CSRFRequired() | ||||
r3088 | def store_user_session_attr(self): | |||
key = self.request.POST.get('key') | ||||
val = self.request.POST.get('val') | ||||
existing_value = self.request.session.get(key) | ||||
if existing_value != val: | ||||
self.request.session[key] = val | ||||
r3642 | return 'stored:{}:{}'.format(key, val) | |||