# -*- coding: utf-8 -*-

# Copyright (C) 2016-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import re
import logging
import collections

from pyramid.view import view_config

from rhodecode.apps._base import BaseAppView
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import (
    LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator,
    CSRFRequired)
from rhodecode.lib.index import searcher_from_config
from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
from rhodecode.lib.ext_json import json
from rhodecode.model.db import (
    func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup)
from rhodecode.model.repo import RepoModel
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.scm import RepoGroupList, RepoList
from rhodecode.model.user import UserModel
from rhodecode.model.user_group import UserGroupModel


log = logging.getLogger(__name__)


class HomeView(BaseAppView):

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        c.user = c.auth_user.get_instance()

        return c

    @LoginRequired()
    @view_config(
        route_name='user_autocomplete_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def user_autocomplete_data(self):
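        """
        Return JSON ``suggestions`` with users matching the ``query`` GET
        parameter; optionally skips the default user and appends matching
        user groups.
        """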
        self.load_default_context()

        query = self.request.GET.get('query')
        active = str2bool(self.request.GET.get('active') or True)
        include_groups = str2bool(self.request.GET.get('user_groups'))
        expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
        skip_default_user = str2bool(self.request.GET.get('skip_default_user'))

        log.debug('generating user list, query:%s, active:%s, with_groups:%s',
                  query, active, include_groups)

        _users = UserModel().get_users(
            name_contains=query, only_active=active)

        def maybe_skip_default_user(usr):
            if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
                return False
            return True
        _users = filter(maybe_skip_default_user, _users)

        if include_groups:
            # extend with user groups
            _user_groups = UserGroupModel().get_user_groups(
                name_contains=query, only_active=active,
                expand_groups=expand_groups)
            _users = _users + _user_groups

        return {'suggestions': _users}

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='user_group_autocomplete_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def user_group_autocomplete_data(self):
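        """
        Return JSON ``suggestions`` with user groups matching the ``query``
        GET parameter.
        """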
        self.load_default_context()

        query = self.request.GET.get('query')
        active = str2bool(self.request.GET.get('active') or True)
        expand_groups = str2bool(self.request.GET.get('user_groups_expand'))

        log.debug('generating user group list, query:%s, active:%s',
                  query, active)

        _user_groups = UserGroupModel().get_user_groups(
            name_contains=query, only_active=active,
            expand_groups=expand_groups)

        return {'suggestions': _user_groups}

    def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20):
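        """
        Return serialized repository entries readable by the current user,
        filtered by ``name_contains`` and ``repo_type``, ordered to favour
        the ``repo_group_name`` context and shorter names.
        """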
        org_query = name_contains
        allowed_ids = self._rhodecode_user.repo_acl_ids(
            ['repository.read', 'repository.write', 'repository.admin'],
            cache=False, name_filter=name_contains) or [-1]

        query = Repository.query()\
            .filter(Repository.archived.isnot(true()))\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids)
            ))

        query = query.order_by(case(
            [
                (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        query = query.order_by(func.length(Repository.repo_name))
        query = query.order_by(Repository.repo_name)

        if repo_type:
            query = query.filter(Repository.repo_type == repo_type)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                Repository.repo_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.repo_name,
                'value': org_query,
                'value_display': obj.repo_name,
                'text': obj.repo_name,
                'type': 'repo',
                'repo_id': obj.repo_id,
                'repo_type': obj.repo_type,
                'private': obj.private,
                'url': h.route_path('repo_summary', repo_name=obj.repo_name)
            }
            for obj in acl_iter]

    def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20):
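        """
        Return serialized repository group entries readable by the current
        user, filtered by ``name_contains``, ordered to favour the
        ``repo_group_name`` context and shorter names.
        """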
        org_query = name_contains
        allowed_ids = self._rhodecode_user.repo_group_acl_ids(
            ['group.read', 'group.write', 'group.admin'],
            cache=False, name_filter=name_contains) or [-1]

        query = RepoGroup.query()\
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(RepoGroup.group_id, allowed_ids)
            ))

        query = query.order_by(case(
            [
                (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'),
            ],
        ))
        query = query.order_by(func.length(RepoGroup.group_name))
        query = query.order_by(RepoGroup.group_name)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                RepoGroup.group_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.group_name,
                'value': org_query,
                'value_display': obj.group_name,
                'text': obj.group_name,
                'type': 'repo_group',
                'repo_group_id': obj.group_id,
                'url': h.route_path(
                    'repo_group_home', repo_group_name=obj.group_name)
            }
            for obj in acl_iter]

    def _get_user_list(self, name_contains=None, limit=20):
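        """
        Resolve a ``user: <name>`` prefixed query into serialized user
        entries. Returns ``(results, prefix_matched)``; the flag is True
        once the ``user:`` prefix was recognised, even if nothing matched.
        """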
        org_query = name_contains
        if not name_contains:
            return [], False

        # TODO(marcink): should all logged in users be allowed to search others?
        allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
        if not allowed_user_search:
            return [], False

        name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains)
        if len(name_contains) != 1:
            return [], False

        name_contains = name_contains[0]

        query = User.query()\
            .order_by(func.length(User.username))\
            .order_by(User.username) \
            .filter(User.username != User.DEFAULT_USER)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                User.username.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.user_id,
                'value': org_query,
                'value_display': 'user: `{}`'.format(obj.username),
                'type': 'user',
                'icon_link': h.gravatar_url(obj.email, 30),
                'url': h.route_path(
                    'user_profile', username=obj.username)
            }
            for obj in acl_iter], True

    def _get_user_groups_list(self, name_contains=None, limit=20):
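        """
        Resolve a ``user_group: <name>`` prefixed query into serialized user
        group entries. Returns ``(results, prefix_matched)``.
        """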
        org_query = name_contains
        if not name_contains:
            return [], False

        # TODO(marcink): should all logged in users be allowed to search others?
        allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
        if not allowed_user_search:
            return [], False

        name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains)
        if len(name_contains) != 1:
            return [], False

        name_contains = name_contains[0]

        query = UserGroup.query()\
            .order_by(func.length(UserGroup.users_group_name))\
            .order_by(UserGroup.users_group_name)

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                UserGroup.users_group_name.ilike(ilike_expression))
            query = query.limit(limit)

        acl_iter = query

        return [
            {
                'id': obj.users_group_id,
                'value': org_query,
                'value_display': 'user_group: `{}`'.format(obj.users_group_name),
                'type': 'user_group',
                'url': h.route_path(
                    'user_group_profile', user_group_name=obj.users_group_name)
            }
            for obj in acl_iter], True

    def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None):
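        """
        Resolve a ``commit: <hash>`` prefixed query via the full-text search
        backend, scoped to ``repo`` or ``repo_group`` when given. Returns
        ``(results, prefix_matched)``.
        """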
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        if not query or len(query) < 3 or not searcher:
            return [], False

        commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query)

        if len(commit_hashes) != 1:
            return [], False

        commit_hash = commit_hashes[0]

        result = searcher.search(
            'commit_id:{}*'.format(commit_hash), 'commit', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        commits = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            commit_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` commit: {}'.format(
                    entry['repository'], entry['commit_id']),
                'type': 'commit',
                'repo': entry['repository'],
                'repo_data': repo_data,
                'url': h.route_path(
                    'repo_commit',
                    repo_name=entry['repository'], commit_id=entry['commit_id'])
            }

            commits.append(commit_entry)
        return commits, True

    def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None):
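        """
        Resolve a ``file: <path>`` prefixed query via the full-text search
        backend, scoped to ``repo`` or ``repo_group`` when given. Returns
        ``(results, prefix_matched)``.
        """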
        repo_name = repo_group_name = None
        if repo:
            repo_name = repo.repo_name
        if repo_group:
            repo_group_name = repo_group.group_name

        org_query = query
        if not query or len(query) < 3 or not searcher:
            return [], False

        paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query)
        if len(paths_re) != 1:
            return [], False

        file_path = paths_re[0]

        search_path = searcher.escape_specials(file_path)
        result = searcher.search(
            'file.raw:*{}*'.format(search_path), 'path', auth_user,
            repo_name, repo_group_name, raise_on_exc=False)

        files = []
        for entry in result['results']:
            repo_data = {
                'repository_id': entry.get('repository_id'),
                'repository_type': entry.get('repo_type'),
                'repository_name': entry.get('repository'),
            }

            file_entry = {
                'id': entry['commit_id'],
                'value': org_query,
                'value_display': '`{}` file: {}'.format(
                    entry['repository'], entry['file']),
                'type': 'file',
                'repo': entry['repository'],
                'repo_data': repo_data,
                'url': h.route_path(
                    'repo_files',
                    repo_name=entry['repository'], commit_id=entry['commit_id'],
                    f_path=entry['file'])
            }

            files.append(file_entry)
        return files, True

    @LoginRequired()
    @view_config(
        route_name='repo_list_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_list_data(self):
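        """
        Return a grouped list of repositories matching the ``query`` and
        ``repo_type`` GET parameters.
        """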
        _ = self.request.translate
        self.load_default_context()

        query = self.request.GET.get('query')
        repo_type = self.request.GET.get('repo_type')
        log.debug('generating repo list, query:%s, repo_type:%s',
                  query, repo_type)

        res = []
        repos = self._get_repo_list(query, repo_type=repo_type)
        if repos:
            res.append({
                'text': _('Repositories'),
                'children': repos
            })

        data = {
            'more': False,
            'results': res
        }
        return data

    @LoginRequired()
    @view_config(
        route_name='repo_group_list_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_group_list_data(self):
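        """
        Return a grouped list of repository groups matching the ``query``
        GET parameter.
        """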
        _ = self.request.translate
        self.load_default_context()

        query = self.request.GET.get('query')
        log.debug('generating repo group list, query:%s',
                  query)

        res = []
        repo_groups = self._get_repo_group_list(query)
        if repo_groups:
            res.append({
                'text': _('Repository Groups'),
                'children': repo_groups
            })

        data = {
            'more': False,
            'results': res
        }
        return data

    def _get_default_search_queries(self, search_context, searcher, query):
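        """
        Build the generic "File search"/"Commit search" suggestion entries,
        scoped to the current repository or repository group when the search
        context provides one, otherwise global.
        """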
        if not searcher:
            return []

        is_es_6 = searcher.is_es_6

        queries = []
        repo_group_name, repo_name, repo_context = None, None, None

        # repo group context
        if search_context.get('search_context[repo_group_name]'):
            repo_group_name = search_context.get('search_context[repo_group_name]')
        if search_context.get('search_context[repo_name]'):
            repo_name = search_context.get('search_context[repo_name]')
            repo_context = search_context.get('search_context[repo_view_type]')

        if is_es_6 and repo_name:
            # files
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'content'}

            label = u'File search for `{}` in this repository.'.format(query)
            file_qry = {
                'id': -10,
                'value': query,
                'value_display': label,
                'type': 'search',
                'url': h.route_path('search_repo',
                                    repo_name=repo_name,
                                    _query=query_modifier())
            }

            # commits
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'commit'}

            label = u'Commit search for `{}` in this repository.'.format(query)
            commit_qry = {
                'id': -20,
                'value': query,
                'value_display': label,
                'type': 'search',
                'url': h.route_path('search_repo',
                                    repo_name=repo_name,
                                    _query=query_modifier())
            }

            if repo_context in ['commit', 'changelog']:
                queries.extend([commit_qry, file_qry])
            elif repo_context in ['files', 'summary']:
                queries.extend([file_qry, commit_qry])
            else:
                queries.extend([commit_qry, file_qry])

        elif is_es_6 and repo_group_name:
            # files
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'content'}

            label = u'File search for `{}` in this repository group'.format(query)
            file_qry = {
                'id': -30,
                'value': query,
                'value_display': label,
                'type': 'search',
                'url': h.route_path('search_repo_group',
                                    repo_group_name=repo_group_name,
                                    _query=query_modifier())
            }

            # commits
            def query_modifier():
                qry = query
                return {'q': qry, 'type': 'commit'}

            label = u'Commit search for `{}` in this repository group'.format(query)
            commit_qry = {
                'id': -40,
                'value': query,
                'value_display': label,
                'type': 'search',
                'url': h.route_path('search_repo_group',
                                    repo_group_name=repo_group_name,
                                    _query=query_modifier())
            }

            if repo_context in ['commit', 'changelog']:
                queries.extend([commit_qry, file_qry])
            elif repo_context in ['files', 'summary']:
                queries.extend([file_qry, commit_qry])
            else:
                queries.extend([commit_qry, file_qry])

        # Global, not scoped
        if not queries:
            queries.append(
                {
                    'id': -1,
                    'value': query,
                    'value_display': u'File search for: `{}`'.format(query),
                    'type': 'search',
                    'url': h.route_path('search',
                                        _query={'q': query, 'type': 'content'})
                })
            queries.append(
                {
                    'id': -2,
                    'value': query,
                    'value_display': u'Commit search for: `{}`'.format(query),
                    'type': 'search',
                    'url': h.route_path('search',
                                        _query={'q': query, 'type': 'commit'})
                })

        return queries

    @LoginRequired()
    @view_config(
        route_name='goto_switcher_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def goto_switcher_data(self):
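        """
        Serve suggestions for the main "goto" filter: prefixed searches
        (``user:``, ``user_group:``, ``commit:``, ``file:``) first, falling
        back to repository group, repository and full-text search entries.
        """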
        c = self.load_default_context()

        _ = self.request.translate

        query = self.request.GET.get('query')
        log.debug('generating main filter data, query %s', query)

        res = []
        if not query:
            return {'suggestions': res}

        def no_match(name):
            return {
                'id': -1,
                'value': "",
                'value_display': name,
                'type': 'text',
                'url': ""
            }

        searcher = searcher_from_config(self.request.registry.settings)
        has_specialized_search = False

        # set repo context
        repo = None
        repo_id = safe_int(self.request.GET.get('search_context[repo_id]'))
        if repo_id:
            repo = Repository.get(repo_id)

        # set group context
        repo_group = None
        repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]'))
        if repo_group_id:
            repo_group = RepoGroup.get(repo_group_id)
        prefix_match = False

        # user: type search
        if not prefix_match:
            users, prefix_match = self._get_user_list(query)
            if users:
                has_specialized_search = True
                for serialized_user in users:
                    res.append(serialized_user)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching users found'))

        # user_group: type search
        if not prefix_match:
            user_groups, prefix_match = self._get_user_groups_list(query)
            if user_groups:
                has_specialized_search = True
                for serialized_user_group in user_groups:
                    res.append(serialized_user_group)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching user groups found'))

        # FTS commit: type search
        if not prefix_match:
            commits, prefix_match = self._get_hash_commit_list(
                c.auth_user, searcher, query, repo, repo_group)
            if commits:
                has_specialized_search = True
                unique_repos = collections.OrderedDict()
                for commit in commits:
                    repo_name = commit['repo']
                    unique_repos.setdefault(repo_name, []).append(commit)

                for _repo, commits in unique_repos.items():
                    for commit in commits:
                        res.append(commit)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching commits found'))

        # FTS file: type search
        if not prefix_match:
            paths, prefix_match = self._get_path_list(
                c.auth_user, searcher, query, repo, repo_group)
            if paths:
                has_specialized_search = True
                unique_repos = collections.OrderedDict()
                for path in paths:
                    repo_name = path['repo']
                    unique_repos.setdefault(repo_name, []).append(path)

                for repo, paths in unique_repos.items():
                    for path in paths:
                        res.append(path)
            elif prefix_match:
                has_specialized_search = True
                res.append(no_match('No matching files found'))

        # main suggestions
        if not has_specialized_search:
            repo_group_name = ''
            if repo_group:
                repo_group_name = repo_group.group_name

            for _q in self._get_default_search_queries(self.request.GET, searcher, query):
                res.append(_q)

            repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name)
            for serialized_repo_group in repo_groups:
                res.append(serialized_repo_group)

            repos = self._get_repo_list(query, repo_group_name=repo_group_name)
            for serialized_repo in repos:
                res.append(serialized_repo)

            if not repos and not repo_groups:
                res.append(no_match('No matches found'))

        return {'suggestions': res}

    def _get_groups_and_repos(self, repo_group_id=None):
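        """
        Collect the JSON-serializable repository and repository group data
        used to render the dashboard grids, optionally scoped to a single
        repository group.
        """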
        # repo groups
        repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
        _perms = ['group.read', 'group.write', 'group.admin']
        repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
        repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
            repo_group_list=repo_group_list_acl, admin=False)

        # repositories
        repo_list = Repository.get_all_repos(group_id=repo_group_id)
        _perms = ['repository.read', 'repository.write', 'repository.admin']
        repo_list_acl = RepoList(repo_list, perm_set=_perms)
        repo_data = RepoModel().get_repos_as_dict(
            repo_list=repo_list_acl, admin=False)

        return repo_data, repo_group_data

    @LoginRequired()
    @view_config(
        route_name='home', request_method='GET',
        renderer='rhodecode:templates/index.mako')
    def main_page(self):
        c = self.load_default_context()
        c.repo_group = None

        repo_data, repo_group_data = self._get_groups_and_repos()
        # json used to render the grids
        c.repos_data = json.dumps(repo_data)
        c.repo_groups_data = json.dumps(repo_group_data)

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoGroupPermissionAnyDecorator(
        'group.read', 'group.write', 'group.admin')
    @view_config(
        route_name='repo_group_home', request_method='GET',
        renderer='rhodecode:templates/index_repo_group.mako')
    @view_config(
        route_name='repo_group_home_slash', request_method='GET',
        renderer='rhodecode:templates/index_repo_group.mako')
    def repo_group_main_page(self):
        c = self.load_default_context()
        c.repo_group = self.request.db_repo_group
        repo_data, repo_group_data = self._get_groups_and_repos(
            c.repo_group.group_id)

        # json used to render the grids
        c.repos_data = json.dumps(repo_data)
        c.repo_groups_data = json.dumps(repo_group_data)

        return self._get_template_context(c)

    @LoginRequired()
    @CSRFRequired()
    @view_config(
        route_name='markup_preview', request_method='POST',
        renderer='string', xhr=True)
    def markup_preview(self):
        # Technically a CSRF token is not needed as no state changes with this
        # call. However, as this is a POST it is better to have it, so automated
        # tools don't flag it as potential CSRF.
        # POST is required because the payload could be bigger than the maximum
        # allowed by GET.

        text = self.request.POST.get('text')
        renderer = self.request.POST.get('renderer') or 'rst'
        if text:
            return h.render(text, renderer=renderer, mentions=True)
        return ''

    @LoginRequired()
    @CSRFRequired()
    @view_config(
        route_name='store_user_session_value', request_method='POST',
        renderer='string', xhr=True)
    def store_user_session_attr(self):
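        """
        Store a single ``key``/``val`` pair from the POST payload in the
        user's server-side session if the value changed, and echo back the
        stored key.
        """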
        key = self.request.POST.get('key')
        val = self.request.POST.get('val')

        existing_value = self.request.session.get(key)
        if existing_value != val:
            self.request.session[key] = val

        return 'stored:{}'.format(key)