# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import io

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.str_utils import safe_bytes, safe_str
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false, null,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of iteration of repos, without the scm initialisation
    and with cache usage
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d
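
# Example (illustrative only, kept as a comment so nothing runs at import
# time; the repos_path value is hypothetical):
#
#   repo_list = SimpleCachedRepoList(
#       Repository.query().all(), repos_path='/srv/repos')
#   visible = [entry['name'] for entry in repo_list]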


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for them against the perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
            # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
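
# Example (illustrative only): the _PermCheckIterator subclasses yield only
# the db objects the current user holds one of the given permissions on:
#
#   writable = RepoList(Repository.query().all(),
#                       perm_set=['repository.write', 'repository.admin'])
#   names = [r.repo_name for r in writable]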


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using the /
            # since '/' is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos
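
    # Example (illustrative sketch; the scan path is hypothetical):
    #
    #   found = ScmModel().repo_scan('/srv/repos')
    #   # => {'repo1': <backend instance>, 'group/repo2': <backend instance>}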

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db, and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering
        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == null())\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == null()).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace(
                    'cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)

def toggle_following_repo(self, follow_repo_id, user_id): | ||||
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_repo_id == follow_repo_id)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
if f is not None: | ||||
try: | ||||
self.sa.delete(f) | ||||
return | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
raise | ||||
try: | ||||
f = UserFollowing() | ||||
f.user_id = user_id | ||||
f.follows_repo_id = follow_repo_id | ||||
self.sa.add(f) | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
raise | ||||

    def toggle_following_user(self, follow_user_id, user_id):
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_user_id == follow_user_id)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
if f is not None: | ||||
try: | ||||
self.sa.delete(f) | ||||
return | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
raise | ||||
try: | ||||
f = UserFollowing() | ||||
f.user_id = user_id | ||||
f.follows_user_id = follow_user_id | ||||
self.sa.add(f) | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
raise | ||||

    def is_following_repo(self, repo_name, user_id, cache=False):
r = self.sa.query(Repository)\ | ||||
.filter(Repository.repo_name == repo_name).scalar() | ||||
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_repository == r)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
return f is not None | ||||

    def is_following_user(self, username, user_id, cache=False):
u = User.get_by_username(username) | ||||
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_user == u)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
return f is not None | ||||

    def get_followers(self, repo):
repo = self._get_repo(repo) | ||||
return self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_repository == repo).count() | ||||

    def get_forks(self, repo):
repo = self._get_repo(repo) | ||||
return self.sa.query(Repository)\ | ||||
.filter(Repository.fork == repo).count() | ||||

    def get_pull_requests(self, repo):
repo = self._get_repo(repo) | ||||
return self.sa.query(PullRequest)\ | ||||
.filter(PullRequest.target_repo == repo)\ | ||||
.filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | ||||

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
repo = self._get_repo(repo) | ||||
fork = self._get_repo(fork) | ||||
if fork and repo.repo_id == fork.repo_id: | ||||
raise Exception("Cannot set repository as fork of itself") | ||||
if fork and repo.repo_type != fork.repo_type: | ||||
raise RepositoryError( | ||||
"Cannot set repository as fork of repository with other type") | ||||
repo.fork = fork | ||||
self.sa.add(repo) | ||||
return repo | ||||

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a push uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commits changes
        """
        user = self._get_user(user)

        # message and author need to be unicode
        # proper backend should then translate that into required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path: bytes):
        if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
            raise NonRelativePathError(b'%b is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path
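
    # Example (illustrative only): relative paths pass through normalized,
    # while absolute or traversal paths raise NonRelativePathError.
    #
    #   ScmModel()._sanitize_path(b'docs/./readme.rst')  # => b'docs/readme.rst'
    #   ScmModel()._sanitize_path(b'../etc/passwd')      # raises NonRelativePathError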

    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes (sub-dirs etc.)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })
        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursive walk in root dir, returning a set of all paths found in
        that dir, based on the repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files
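
    # Example (illustrative sketch; repo name and commit id are hypothetical):
    #
    #   dirs, files = ScmModel().get_nodes(
    #       'my-repo', commit_id='tip', root_path='/', flat=False,
    #       extended_info=True)
    #   # files => [{'name': ..., 'type': 'file', 'md5': ..., 'size': ...}, ...]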

    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):
                for f in files:
                    _data = {
                        "name": h.escape(f.str_path),
                        "type": "file",
                    }
                    _files.append(_data)

                for d in dirs:
                    _data = {
                        "name": h.escape(d.str_path),
                        "type": "dir",
                    }
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit
        """

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

                if cache:
                    md5 = file_node.md5
                    is_binary = file_node.is_binary
                    size = file_node.size
                else:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                file_data.update({
                    "md5": md5,
                    "binary": is_binary,
                    "size": size,
                })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
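
    # Example (illustrative sketch; arguments are hypothetical):
    #
    #   file_data = ScmModel().get_node(
    #       'my-repo', commit_id='tip', file_path='README.rst',
    #       extended_info=True, content=True, max_file_bytes=1024 * 1024)
    #   # => {'name': 'README.rst', 'type': 'file', 'content': ..., 'lines': ...}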

    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.str_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)
        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's an
            initial commit
        :param author: author of commit, can be different than the committer,
            but only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError('filename key in nodes needs to be bytes')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError(
                    f'content key value in nodes needs to be bytes or one of {upload_file_types}')

        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']

            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
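
    # Example (illustrative sketch; cur_user and db_repo are assumed to
    # exist): filename keys are bytes, content is bytes or an open binary
    # stream.
    #
    #   tip = ScmModel().create_nodes(
    #       user=cur_user, repo=db_repo, message='Add docs',
    #       nodes={b'docs/index.rst': {'content': b'Docs\n====\n'}})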

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
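
    # Example (illustrative sketch): each node entry carries an 'op' of
    # 'add', 'mod' or 'del'; 'mod' with a changed 'filename' performs a
    # rename via remove + add.
    #
    #   nodes = {b'old.txt': {'op': 'mod', 'filename': b'new.txt',
    #                         'content': b'updated', 'mode': None}}
    #   tip = ScmModel().update_nodes(cur_user, db_repo, 'Rename file', nodes)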

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's an
            initial commit
        :param author: author of commit, can be different than the committer,
            but only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']
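
    # Example (values come straight from the mapping above):
    #
    #   ScmModel.backend_landing_ref('git')      # => ('branch:master', 'master')
    #   ScmModel.backend_landing_ref('unknown')  # => ('rev:tip', 'latest tip')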

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg only)
        grouped by type

        :param repo:
        """
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or a repo without any branches yet
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info