# Copyright (C) 2011-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import itertools
import logging
import os
import collections
import urllib.request
import urllib.parse
import urllib.error
import pathlib

from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound

from pyramid.renderers import render
from pyramid.response import Response

import rhodecode
from rhodecode.apps._base import RepoAppView


from rhodecode.lib import diffs, helpers as h, rc_cache
from rhodecode.lib import audit_logger
from rhodecode.lib.hash_utils import sha1_safe
from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.lib.view_utils import parse_path_ref
from rhodecode.lib.exceptions import NonRelativePathError
from rhodecode.lib.codeblocks import (
    filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
from rhodecode.lib.utils2 import convert_line_endings, detect_mode
from rhodecode.lib.type_utils import str2bool
from rhodecode.lib.str_utils import safe_str, safe_int
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
    NodeDoesNotExistError, CommitError, NodeError)

from rhodecode.model.scm import ScmModel
from rhodecode.model.db import Repository

log = logging.getLogger(__name__)


def get_archive_name(db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
    # original backward compat name of archive
    clean_name = safe_str(db_repo_name.replace('/', '_'))

    # e.g. vcsserver-sub-1-abcfdef-archive-all.zip
    # vcsserver-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip

    sub_repo = 'sub-1' if subrepos else 'sub-0'
    commit = commit_sha if with_hash else 'archive'
    path_marker = (path_sha if with_hash else '') or 'all'
    archive_name = f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}'

    return archive_name
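
# Illustrative example of the naming scheme above (the values are made up):
#   get_archive_name('group/vcsserver', commit_sha='abcfdef', ext='.zip', path_sha='deadbeef')
#   -> 'group_vcsserver-sub-0-abcfdef-deadbeef.zip'
# With with_hash=False the commit and path markers collapse to the backward
# compatible form 'group_vcsserver-sub-0-archive-all.zip'.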


def get_path_sha(at_path):
    return safe_str(sha1_safe(at_path)[:8])


def _get_archive_spec(fname):
    log.debug('Detecting archive spec for: `%s`', fname)

    fileformat = None
    ext = None
    content_type = None
    for a_type, content_type, extension in settings.ARCHIVE_SPECS:

        if fname.endswith(extension):
            fileformat = a_type
            log.debug('archive is of type: %s', fileformat)
            ext = extension
            break

    if not fileformat:
        raise ValueError()

    # left over part of whole fname is the commit
    commit_id = fname[:-len(ext)]

    return commit_id, ext, fileformat, content_type
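
# For illustration only (the recognized extensions and content types come from
# settings.ARCHIVE_SPECS): a fname such as 'abcdef123456.tar.gz' would be split
# into ('abcdef123456', '.tar.gz', <matching archive type>, <matching content type>).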


class RepoFilesView(RepoAppView):

    @staticmethod
    def adjust_file_path_for_svn(f_path, repo):
        """
        Computes the relative path of `f_path`.

        This is mainly based on prefix matching of the recognized tags and
        branches in the underlying repository.
        """
        tags_and_branches = itertools.chain(
            repo.branches.keys(),
            repo.tags.keys())
        tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)

        for name in tags_and_branches:
            if f_path.startswith(f'{name}/'):
                f_path = vcspath.relpath(f_path, name)
                break
        return f_path

    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo
        c.enable_downloads = self.db_repo.enable_downloads
        return c

    def _ensure_not_locked(self, commit_id='tip'):
        _ = self.request.translate

        repo = self.db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            files_url = h.route_path(
                'repo_files:default_path',
                repo_name=self.db_repo_name, commit_id=commit_id)
            raise HTTPFound(files_url)

    def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
        _ = self.request.translate

        if not is_head:
            message = _('Cannot modify file. '
                        'Given commit `{}` is not head of a branch.').format(commit_id)
            h.flash(message, category='warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
                f_path=f_path)
            raise HTTPFound(files_url)

    def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
        _ = self.request.translate

        rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
            self.db_repo_name, branch_name)
        if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
            message = _('Branch `{}` changes forbidden by rule {}.').format(
                h.escape(branch_name), h.escape(rule))
            h.flash(message, 'warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)

            raise HTTPFound(files_url)

    def _get_commit_and_path(self):
        default_commit_id = self.db_repo.landing_ref_name
        default_f_path = '/'

        commit_id = self.request.matchdict.get(
            'commit_id', default_commit_id)
        f_path = self._get_f_path(self.request.matchdict, default_f_path)
        return commit_id, f_path

    def _get_default_encoding(self, c):
        enc_list = getattr(c, 'default_encodings', [])
        return enc_list[0] if enc_list else 'UTF-8'

    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        A safe way to get a commit. If an error occurs, it redirects to the
        tip with a proper message.

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            add_new = upload_new = ""
            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                _url = h.route_path(
                    'repo_files_add_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                add_new = h.link_to(
                    _('add a new file'), _url, class_="alert-link")

                _url_upld = h.route_path(
                    'repo_files_upload_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                upload_new = h.link_to(
                    _('upload a new file'), _url_upld, class_="alert-link")

            h.flash(h.literal(
                _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError) as e:
            msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

    def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
        """
        Returns the file node. If an error occurs, or the given path is a
        directory, it redirects to the top level path.
        """
        _ = self.request.translate

        try:
            file_node = commit_obj.get_node(path, pre_load=pre_load)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')
        except CommitDoesNotExistError:
            log.exception('No such commit exists for this repository')
            h.flash(_('No such commit exists for this repository'), category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

        return file_node

    def _is_valid_head(self, commit_id, repo, landing_ref):
        branch_name = sha_commit_id = ''
        is_head = False
        log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case we pass in branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            return branch_name, sha_commit_id, is_head

        # checked branches, means we only need to try to get the branch/commit_sha
        if repo.is_empty():
            is_head = True
            branch_name = landing_ref
            sha_commit_id = EmptyCommit().raw_id
        else:
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head

    def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):

        repo_id = self.db_repo.repo_id
        force_recache = self.get_recache_flag()

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = not force_recache and cache_seconds > 0
        log.debug(
            'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
            ' with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

        cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
            log.debug('Generating cached file tree for repo_id: %s, %s, %s',
                      _repo_id, _commit_id, _f_path)

            c.full_load = _full_load
            return render(
                'rhodecode:templates/files/files_browser_tree.mako',
                self._get_template_context(c), self.request, _at_rev)

        return compute_file_tree(
            self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
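
    # Cache note for _get_tree_at_commit above: the rendered tree is stored in the
    # 'cache_repo' dogpile region under the f'repo.{FILE_TREE_CACHE_VER}.{repo_id}'
    # namespace; rc_cache.cache_repo.expiration_time controls the TTL, and the
    # recache flag (self.get_recache_flag()) forces a fresh render.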

    def create_pure_path(self, *parts):
        # Split paths and sanitize them, removing any ../ etc
        sanitized_path = [
            x for x in pathlib.PurePath(*parts).parts
            if x not in ['.', '..']]

        pure_path = pathlib.PurePath(*sanitized_path)
        return pure_path

    def _is_lf_enabled(self, target_repo):
        lf_enabled = False

        lf_key_for_vcs_map = {
            'hg': 'extensions_largefiles',
            'git': 'vcs_git_lfs_enabled'
        }

        lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)

        if lf_key_for_vcs:
            lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)

        return lf_enabled

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_archivefile(self):
        # archive cache config
        from rhodecode import CONFIG
        _ = self.request.translate
        self.load_default_context()
        default_at_path = '/'
        fname = self.request.matchdict['fname']
        subrepos = self.request.GET.get('subrepos') == 'true'
        with_hash = str2bool(self.request.GET.get('with_hash', '1'))
        at_path = self.request.GET.get('at_path') or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_('Downloads disabled'))

        try:
            commit_id, ext, fileformat, content_type = \
                _get_archive_spec(fname)
        except ValueError:
            return Response(_('Unknown archive type for: `{}`').format(
                h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_('Unknown commit_id {}').format(
                h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_('Empty repository'))

        # we used a ref or a shortened version, let's redirect the client to use the explicit hash
        if commit_id != commit.raw_id:
            fname = f'{commit.raw_id}{ext}'
            raise HTTPFound(self.request.current_route_path(fname=fname))

        try:
            at_path = commit.get_node(at_path).path or default_at_path
        except Exception:
            return Response(_('No node at path {} for this repository').format(h.escape(at_path)))

        path_sha = get_path_sha(at_path)

        # used for cache etc, consistent unique archive name
        archive_name_key = get_archive_name(
            self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=True)

        if not with_hash:
            path_sha = ''

        # what end client gets served
        response_archive_name = get_archive_name(
            self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=with_hash)

        # remove extension from our archive directory name
        archive_dir_name = response_archive_name[:-len(ext)]

        archive_cache_disable = self.request.GET.get('no_cache')

        d_cache = get_archival_cache_store(config=CONFIG)
        # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
        d_cache_conf = get_archival_config(config=CONFIG)

        reentrant_lock_key = archive_name_key + '.lock'
        with ReentrantLock(d_cache, reentrant_lock_key):
            # This is also a cache key
            use_cached_archive = False
            if archive_name_key in d_cache and not archive_cache_disable:
                reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
                use_cached_archive = True
                log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
                          archive_name_key, tag, reader.name)
            else:
                reader = None
                log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)

            # generate new archive, as previous was not found in the cache
            if not reader:
                # first remove expired items, before generating a new one :)
                # we do this manually because automatic eviction is disabled
                d_cache.cull(retry=True)

                try:
                    commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
                                        kind=fileformat, subrepos=subrepos,
                                        archive_at_path=at_path, cache_config=d_cache_conf)
                except ImproperArchiveTypeError:
                    return _('Unknown archive type')

                reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)

            if not reader:
                raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache')

            def archive_iterator(_reader):
                # stream the cached archive back to the client in 1kB chunks
                while 1:
                    data = _reader.read(1024)
                    if not data:
                        break
                    yield data

            response = Response(app_iter=archive_iterator(reader))
            response.content_disposition = f'attachment; filename={response_archive_name}'
            response.content_type = str(content_type)

            try:
                return response
            finally:
                # store download action
                audit_logger.store_web(
                    'repo.archive.download', action_data={
                        'user_agent': self.request.user_agent,
                        'archive_name': archive_name_key,
                        'archive_spec': fname,
                        'archive_cached': use_cached_archive},
                    user=self._rhodecode_user,
                    repo=self.db_repo,
                    commit=True
                )
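
    # _get_file_node (below) resolves a path at a given commit for diffing; when
    # the commit id is empty/zero or the path does not exist there, it falls back
    # to an empty FileNode bound to an EmptyCommit so a missing side of a diff
    # behaves like an empty file.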

    def _get_file_node(self, commit_id, f_path):
        if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            try:
                node = commit.get_node(f_path)
                if node.is_dir():
                    raise NodeError(f'{node} path is a {type(node)} not a file')
            except NodeDoesNotExistError:
                commit = EmptyCommit(
                    commit_id=commit_id,
                    idx=commit.idx,
                    repo=commit.repository,
                    alias=commit.repository.alias,
                    message=commit.message,
                    author=commit.author,
                    date=commit.date)
                node = FileNode(safe_bytes(f_path), b'', commit=commit)
        else:
            commit = EmptyCommit(
                repo=self.rhodecode_vcs_repo,
                alias=self.rhodecode_vcs_repo.alias)
            node = FileNode(safe_bytes(f_path), b'', commit=commit)
        return node

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff(self):
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
        line_context = self.request.GET.get('context', 3)

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        c.action = self.request.GET.get('diff')
        if c.action not in ['download', 'raw']:
            compare_url = h.route_path(
                'repo_compare',
                repo_name=self.db_repo_name,
                source_ref_type='rev',
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type='rev',
                target_ref=diff2,
                _query=dict(f_path=f_path))
            # redirect to new view if we render diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(
                h.route_path('repo_files', repo_name=self.db_repo_name,
                             commit_id='tip', f_path=f_path))

        if all(isinstance(node.commit, EmptyCommit)
               for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            response.content_disposition = (
                f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
            )
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == 'raw':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff_2way_redirect(self):
        """
        Kept only to make OLD links work
        """
        f_path = self._get_f_path_unchecked(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        compare_url = h.route_path(
            'repo_compare',
            repo_name=self.db_repo_name,
            source_ref_type='rev',
            source_ref=diff1,
            target_ref_type='rev',
            target_ref=diff2,
            _query=dict(f_path=f_path, diffmode='sideside',
                        target_repo=self.db_repo_name,))
        raise HTTPFound(compare_url)

    @LoginRequired()
    def repo_files_default_commit_redirect(self):
        """
        Special page that redirects to the landing page of files based on the
        default commit for the repository
        """
        c = self.load_default_context()
        ref_name = c.rhodecode_db_repo.landing_ref_name
        landing_url = h.repo_files_by_ref_url(
            c.rhodecode_db_repo.repo_name,
            c.rhodecode_db_repo.repo_type,
            f_path='',
            ref_name=ref_name,
            commit_id='tip',
            query=dict(at=ref_name)
        )

        raise HTTPFound(landing_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files(self):
        c = self.load_default_context()

        view_name = getattr(self.request.matched_route, 'name', None)

        c.annotate = view_name == 'repo_files:annotated'
        # default is False, but .rst/.md files are auto-rendered later on; we can
        # overwrite that auto rendering by setting this GET flag
        c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)

        commit_id, f_path = self._get_commit_and_path()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.branch = self.request.GET.get('branch', None)
        c.f_path = f_path
        at_rev = self.request.GET.get('at')

        # prev link
        try:
            prev_commit = c.commit.prev(c.branch)
            c.prev_commit = prev_commit
            c.url_prev = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=prev_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_prev += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            c.url_prev = '#'
            c.prev_commit = EmptyCommit()

        # next link
        try:
            next_commit = c.commit.next(c.branch)
            c.next_commit = next_commit
            c.url_next = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=next_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_next += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            c.url_next = '#'
            c.next_commit = EmptyCommit()

        # files or dirs
        try:
            c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])

            c.file_author = True
            c.file_tree = ''

            # load file content
            if c.file.is_file():
                c.lf_node = {}

                has_lf_enabled = self._is_lf_enabled(self.db_repo)
                if has_lf_enabled:
                    c.lf_node = c.file.get_largefile_node()

                c.file_source_page = 'true'
                c.file_last_commit = c.file.last_commit

                c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file

                if not (c.file_size_too_big or c.file.is_binary):
                    if c.annotate:  # annotation has precedence over renderer
                        c.annotated_lines = filenode_as_annotated_lines_tokens(
                            c.file
                        )
                    else:
                        c.renderer = (
                            c.renderer and h.renderer_from_filename(c.file.path)
                        )
                        if not c.renderer:
                            c.lines = filenode_as_lines_tokens(c.file)

                _branch_name, _sha_commit_id, is_head = \
                    self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                        landing_ref=self.db_repo.landing_ref_name)
                c.on_branch_head = is_head

                branch = c.commit.branch if (
                    c.commit.branch and '/' not in c.commit.branch) else None
                c.branch_or_raw_id = branch or c.commit.raw_id
                c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)

                author = c.file_last_commit.author
                c.authors = [[
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1
                ]]

            else:  # load tree content at path
                c.file_source_page = 'false'
                c.authors = []
                # this loads a simple tree without metadata to speed things up
                # later via ajax we call repo_nodetree_full and fetch the whole tree
                c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)

            c.readme_data, c.readme_file = \
                self._get_readme_data(self.db_repo, c.visual.default_renderer,
                                      c.commit.raw_id, f_path)

        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

        if self.request.environ.get('HTTP_X_PJAX'):
            html = render('rhodecode:templates/files/files_pjax.mako',
                          self._get_template_context(c), self.request)
        else:
            html = render('rhodecode:templates/files/files.mako',
                          self._get_template_context(c), self.request)
        return Response(html)

    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_annotated_previous(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        prev_commit_id = commit.raw_id
        line_anchor = self.request.GET.get('line_anchor')
        is_file = False
        try:
            _file = commit.get_node(f_path)
            is_file = _file.is_file()
        except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
            pass

        if is_file:
            history = commit.get_path_history(f_path)
            prev_commit_id = history[1].raw_id \
                if len(history) > 1 else prev_commit_id
        prev_url = h.route_path(
            'repo_files:annotated', repo_name=self.db_repo_name,
            commit_id=prev_commit_id, f_path=f_path,
            _anchor=f'L{line_anchor}')

        raise HTTPFound(prev_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_nodetree_full(self):
        """
        Returns rendered html of file tree that contains commit date,
        author, commit_id for the specified combination of
        repo, commit_id and file path
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        try:
            dir_node = commit.get_node(f_path)
        except RepositoryError as e:
            return Response(f'error: {h.escape(safe_str(e))}')

        if dir_node.is_file():
            return Response('')

        c.file = dir_node
        c.commit = commit
        at_rev = self.request.GET.get('at')

        html = self._get_tree_at_commit(
            c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)

        return Response(html)
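
    # The helper below builds a Content-Disposition value carrying both a quoted
    # `filename` and an RFC 5987 style `filename*=UTF-8''...` parameter, so that
    # non-ASCII names survive the download. Illustrative example (made-up path):
    # 'docs/plan A.txt' -> attachment; filename="plan A.txt"; filename*=UTF-8''plan%20A.txt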

    def _get_attachement_headers(self, f_path):
        f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
        safe_path = f_name.replace('"', '\\"')
        encoded_path = urllib.parse.quote(f_name)

        return "attachment; " \
               "filename=\"{}\"; " \
               "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_raw(self):
        """
        Action for show as raw, some mimetypes are "rendered",
        those include images, icons.
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        raw_mimetype_mapping = {
            # map original mimetype to a mimetype used for "show as raw"
            # you can also provide a content-disposition to override the
            # default "attachment" disposition.
            # orig_type: (new_type, new_dispo)

            # show images inline:
            # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
            # for example render an SVG with javascript inside or even render
            # HTML.
            'image/x-icon': ('image/x-icon', 'inline'),
            'image/png': ('image/png', 'inline'),
            'image/gif': ('image/gif', 'inline'),
            'image/jpeg': ('image/jpeg', 'inline'),
            'application/pdf': ('application/pdf', 'inline'),
        }

        mimetype = file_node.mimetype
        try:
            mimetype, disposition = raw_mimetype_mapping[mimetype]
        except KeyError:
            # we don't know anything special about this, handle it safely
            if file_node.is_binary:
                # do same as download raw for binary files
                mimetype, disposition = 'application/octet-stream', 'attachment'
            else:
                # do not just use the original mimetype, but force text/plain,
                # otherwise it would serve text/html and that might be unsafe.
                # Note: underlying vcs library fakes text/plain mimetype if the
                # mimetype can not be determined and it thinks it is not
                # binary. This might lead to erroneous text display in some
                # cases, but helps in other cases, like with text files
                # without extension.
                mimetype, disposition = 'text/plain', 'inline'

        if disposition == 'attachment':
            disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_download(self):
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if self.request.GET.get('lf'):
            # only if lf get flag is passed, we download this file
            # as LFS/Largefile
            lf_node = file_node.get_largefile_node()
            if lf_node:
                # overwrite our pointer with the REAL large-file
                file_node = lf_node

        disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = file_node.mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

    def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = cache_seconds > 0
        log.debug(
            'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
            ' with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

        cache_namespace_uid = f'repo.{repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
            log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
                      _repo_id, _commit_id, _f_path)
            try:
                _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
            except (RepositoryError, CommitDoesNotExistError, Exception) as e:
                log.exception(safe_str(e))
                h.flash(h.escape(safe_str(e)), category='error')
                raise HTTPFound(h.route_path(
                    'repo_files', repo_name=self.db_repo_name,
                    commit_id='tip', f_path='/'))

            return _d + _f

        result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
                                     commit_id, f_path)
        return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_nodelist(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)

        metadata = self._get_nodelist_at_commit(
            self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
        return {'nodes': [x for x in metadata]}

    def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
        items = []
        for name, commit_id in branches_or_tags.items():
            sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
            items.append((sym_ref, name, ref_type))
        return items

    def _symbolic_reference(self, commit_id, name, f_path, ref_type):
        return commit_id

    def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
        return commit_id

        # NOTE(dan): old code we used in "diff" mode compare
        new_f_path = vcspath.join(name, f_path)
        return f'{new_f_path}@{commit_id}'

    def _get_node_history(self, commit_obj, f_path, commits=None):
        """
        get commit history for given node

        :param commit_obj: commit to calculate history
        :param f_path: path for node to calculate history for
        :param commits: if passed don't calculate history and take
            commits defined in this list
        """
        _ = self.request.translate

        # calculate history based on tip
        tip = self.rhodecode_vcs_repo.get_commit()
        if commits is None:
            pre_load = ["author", "branch"]
            try:
                commits = tip.get_path_history(f_path, pre_load=pre_load)
            except (NodeDoesNotExistError, CommitError):
                # this node is not present at tip!
                commits = commit_obj.get_path_history(f_path, pre_load=pre_load)

        history = []
        commits_group = ([], _("Changesets"))
        for commit in commits:
            branch = ' (%s)' % commit.branch if commit.branch else ''
            n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
            commits_group[0].append((commit.raw_id, n_desc, 'sha'))
        history.append(commits_group)

        symbolic_reference = self._symbolic_reference

        if self.rhodecode_vcs_repo.alias == 'svn':
            adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
                f_path, self.rhodecode_vcs_repo)
            if adjusted_f_path != f_path:
                log.debug(
                    'Recognized svn tag or branch in file "%s", using svn '
                    'specific symbolic references', f_path)
                f_path = adjusted_f_path
                symbolic_reference = self._symbolic_reference_svn

        branches = self._create_references(
            self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
        branches_group = (branches, _("Branches"))

        tags = self._create_references(
            self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
        tags_group = (tags, _("Tags"))

        history.append(branches_group)
        history.append(tags_group)

        return history, commits

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_history(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if file_node.is_file():
            file_history, _hist = self._get_node_history(commit, f_path)

            res = []
            for section_items, section in file_history:
                items = []
                for obj_id, obj_text, obj_type in section_items:
                    at_rev = ''
                    if obj_type in ['branch', 'bookmark', 'tag']:
                        at_rev = obj_text
                    entry = {
                        'id': obj_id,
                        'text': obj_text,
                        'type': obj_type,
                        'at_rev': at_rev
                    }

                    items.append(entry)

                res.append({
                    'text': section,
                    'children': items
                })

            data = {
                'more': False,
                'results': res
            }
            return data

        log.warning('Cannot fetch history for directory')
        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_authors(self):
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if not file_node.is_file():
            raise HTTPBadRequest()

        c.file_last_commit = file_node.last_commit
        if self.request.GET.get('annotate') == '1':
            # use _hist from annotation if annotation mode is on
            commit_ids = {x[1] for x in file_node.annotate}
            _hist = (
                self.rhodecode_vcs_repo.get_commit(commit_id)
                for commit_id in commit_ids)
        else:
            _f_history, _hist = self._get_node_history(commit, f_path)
        c.file_author = False

        unique = collections.OrderedDict()
        for commit in _hist:
            author = commit.author
            if author not in unique:
                unique[commit.author] = [
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1  # counter
                ]

            else:
                # increase counter
                unique[commit.author][2] += 1

        c.authors = [val for val in unique.values()]

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_check_head(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        new_path = self.request.POST.get('path')
        operation = self.request.POST.get('operation')
        path_exist = ''

        if new_path and operation in ['create', 'upload']:
            new_f_path = os.path.join(f_path.lstrip('/'), new_path)
            try:
                commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
                # NOTE(dan): construct whole path without leading /
                file_node = commit_obj.get_node(new_f_path)
                if file_node is not None:
                    path_exist = new_f_path
            except EmptyRepositoryError:
                pass
            except Exception:
                pass

        return {
            'branch': _branch_name,
            'sha': _sha_commit_id,
            'is_head': is_head,
            'path_exists': path_exist
        }

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_remove_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_delete_file(self):
        _ = self.request.translate

        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path
        node_path = f_path
        author = self._rhodecode_db_user.full_contact
        message = self.request.POST.get('message') or c.default_message
        try:
            nodes = {
                safe_bytes(node_path): {
                    'content': b''
                }
            }
            ScmModel().delete_nodes(
                user=self._rhodecode_db_user.user_id, repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(
                _('Successfully deleted file `{}`').format(
                    h.escape(f_path)), category='success')
        except Exception:
            log.exception('Error during commit operation')
            h.flash(_('Error occurred during commit'), category='error')
        raise HTTPFound(
            h.route_path('repo_commit', repo_name=self.db_repo_name,
                         commit_id='tip'))

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_edit_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            files_url = h.route_path(
                'repo_files',
                repo_name=self.db_repo_name,
                commit_id=c.commit.raw_id, f_path=f_path)
            raise HTTPFound(files_url)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_update_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
                                         commit_id=c.commit.raw_id, f_path=f_path))

        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        old_content = c.file.str_content
        sl = old_content.splitlines(1)
        first_line = sl[0] if sl else ''

        r_post = self.request.POST
        # line endings: 0 - Unix, 1 - Mac, 2 - DOS
        line_ending_mode = detect_mode(first_line, 0)
        content = convert_line_endings(r_post.get('content', ''), line_ending_mode)

        message = r_post.get('message') or c.default_message

        org_node_path = c.file.str_path
        filename = r_post['filename']

        root_path = c.file.dir_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix()

        default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
                                            commit_id=commit_id)
        if content == old_content and node_path == org_node_path:
            h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
                    category='warning')
            raise HTTPFound(default_redirect_url)

        try:
            mapping = {
                c.file.bytes_path: {
                    'org_filename': org_node_path,
                    'filename': safe_bytes(node_path),
                    'content': safe_bytes(content),
                    'lexer': '',
                    'op': 'mod',
                    'mode': c.file.mode
                }
            }

            commit = ScmModel().update_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=mapping,
                parent_commit=c.commit,
            )

            h.flash(_('Successfully committed changes to file `{}`').format(
                h.escape(f_path)), category='success')
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except Exception:
            log.exception('Error occurred during commit')
            h.flash(_('Error occurred during commit'), category='error')

        raise HTTPFound(default_redirect_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_add_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = (_('Added file via RhodeCode Enterprise'))
        c.f_path = f_path.lstrip('/')  # ensure not relative path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_create_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path(
                'repo_summary', repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id='tip')

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = (_('Added file via RhodeCode Enterprise'))
        c.f_path = f_path

        r_post = self.request.POST
        message = r_post.get('message') or c.default_message
        filename = r_post.get('filename')
        unix_mode = 0

        if not filename:
            # If there's no commit, redirect to repo summary
            if type(c.commit) is EmptyCommit:
                redirect_url = h.route_path(
                    'repo_summary', repo_name=self.db_repo_name)
            else:
                redirect_url = default_redirect_url
            h.flash(_('No filename specified'), category='warning')
            raise HTTPFound(redirect_url)

        root_path = f_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix().lstrip('/')

        author = self._rhodecode_db_user.full_contact
        content = convert_line_endings(r_post.get('content', ''), unix_mode)
        nodes = {
            safe_bytes(node_path): {
                'content': safe_bytes(content)
            }
        }

        try:

            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_('Successfully committed new file `{}`').format(
                h.escape(node_path)), category='success')

            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception('Non Relative path found')
            h.flash(_('The location specified must be a relative path and must not '
                      'contain .. in the path'), category='warning')
            raise HTTPFound(default_redirect_url)
        except (NodeError, NodeAlreadyExistsError) as e:
            h.flash(h.escape(safe_str(e)), category='error')
        except Exception:
            log.exception('Error occurred during commit')
            h.flash(_('Error occurred during commit'), category='error')

        raise HTTPFound(default_redirect_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_upload_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path(
                'repo_summary', repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id='tip')

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        error = self.forbid_non_head(is_head, f_path, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        error = self.check_branch_permission(_branch_name, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
        c.f_path = f_path

        r_post = self.request.POST

        message = c.default_message
        user_message = r_post.getall('message')
        if isinstance(user_message, list) and user_message:
            # we take the first from duplicated results if it's not empty
            message = user_message[0] if user_message[0] else message

        nodes = {}

        for file_obj in r_post.getall('files_upload') or []:
            content = file_obj.file
            filename = file_obj.filename

            root_path = f_path
            pure_path = self.create_pure_path(root_path, filename)
            node_path = pure_path.as_posix().lstrip('/')

            nodes[safe_bytes(node_path)] = {
                'content': content
            }

        if not nodes:
            error = 'missing files'
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        author = self._rhodecode_db_user.full_contact

        try:
            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )
            if len(nodes) == 1:
                flash_message = _('Successfully committed 1 new file')
            else:
                flash_message = _('Successfully committed {} new files').format(len(nodes))

            h.flash(flash_message, category='success')

            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception('Non Relative path found')
            error = _('The location specified must be a relative path and must not '
                      'contain .. in the path')
            h.flash(error, category='warning')

            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        except (NodeError, NodeAlreadyExistsError) as e:
            error = h.escape(e)
            h.flash(error, category='error')

            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        except Exception:
            log.exception('Error occurred during commit')
            error = _('Error occurred during commit')
            h.flash(error, category='error')
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        return {
            'error': None,
            'redirect_url': default_redirect_url
        }