scm.py
727 lines
| 25.8 KiB
| text/x-python
|
PythonLexer
r757 | # -*- coding: utf-8 -*- | |||
""" | ||||
r811 | rhodecode.model.scm | |||
~~~~~~~~~~~~~~~~~~~ | ||||
r757 | ||||
r811 | Scm model for RhodeCode | |||
r757 | :created_on: Apr 9, 2010 | |||
:author: marcink | ||||
r1824 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |||
r757 | :license: GPLv3, see COPYING for more details. | |||
""" | ||||
r1206 | # This program is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU General Public License as published by | ||||
# the Free Software Foundation, either version 3 of the License, or | ||||
# (at your option) any later version. | ||||
r1203 | # | |||
r691 | # This program is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
r1203 | # | |||
r691 | # You should have received a copy of the GNU General Public License | |||
r1206 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
r2620 | from __future__ import with_statement | |||
r1554 | import os | |||
r2618 | import re | |||
r757 | import time | |||
import traceback | ||||
import logging | ||||
r1801 | import cStringIO | |||
r2618 | import pkg_resources | |||
from os.path import dirname as dn, join as jn | ||||
r757 | ||||
r2354 | from sqlalchemy import func | |||
r2459 | from pylons.i18n.translation import _ | |||
r2354 | ||||
r2618 | import rhodecode | |||
r2007 | from rhodecode.lib.vcs import get_backend | |||
from rhodecode.lib.vcs.exceptions import RepositoryError | ||||
from rhodecode.lib.vcs.utils.lazy import LazyProperty | ||||
from rhodecode.lib.vcs.nodes import FileNode | ||||
r2684 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |||
r757 | ||||
r710 | from rhodecode import BACKENDS | |||
r691 | from rhodecode.lib import helpers as h | |||
r3577 | from rhodecode.lib.utils2 import safe_str, safe_unicode, get_server_url,\ | |||
_set_extras | ||||
r3714 | from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny,\ | |||
HasUserGroupPermissionAnyDecorator, HasUserGroupPermissionAny | ||||
r3228 | from rhodecode.lib.utils import get_filesystem_repos, make_ui, \ | |||
r2684 | action_logger, REMOVED_REPO_PAT | |||
r752 | from rhodecode.model import BaseModel | |||
r758 | from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \ | |||
r2440 | UserFollowing, UserLog, User, RepoGroup, PullRequest | |||
r3478 | from rhodecode.lib.hooks import log_push_action | |||
r3840 | from rhodecode.lib.exceptions import NonRelativePathError | |||
r757 | ||||
r691 | log = logging.getLogger(__name__) | |||
r757 | ||||
class UserTemp(object):
    """Lightweight stand-in carrying only a user id, used for action logging."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (type(self).__name__, self.user_id)
r1213 | ||||
class RepoTemp(object):
    """Lightweight stand-in carrying only a repo id, used for action logging."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (type(self).__name__, self.repo_id)
r1801 | ||||
class CachedRepoList(object):
    """
    Cached repo list, uses in-memory cache after initialization, that is
    super fast
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        # pre-propagated valid_cache_keys to save executing select statements
        # for each repo
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(valid_cache_keys)
            # check permission at this level
            if not HasRepoPermissionAny(*self.perm_set)(
                    dbr.repo_name, 'get repo check'):
                continue
            try:
                last_change = scmr.last_change
                tip = h.get_changeset_safe(scmr, 'tip')
            except Exception:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance, org_exc:%s'
                    % (dbr.repo_name, traceback.format_exc())
                )
                continue

            name = dbr.repo_name
            description = dbr.description
            contact = dbr.user.full_contact
            yield {
                'name': name,
                'name_sort': name.lower(),
                'raw_name': name.lower(),
                'description': description,
                'description_sort': description.lower(),
                'last_change': last_change,
                'last_change_sort': time.mktime(last_change.timetuple()),
                'tip': tip.raw_id,
                'tip_sort': tip.revision,
                'rev': tip.revision,
                'contact': contact,
                'contact_sort': contact,
                'owner_sort': contact,
                'repo_archives': list(scmr._get_archives()),
                'last_msg': tip.message,
                'author': tip.author,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
r1213 | ||||
r1801 | ||||
class SimpleCachedRepoList(CachedRepoList):
    """
    Lighter version of CachedRepoList without the scm initialisation
    """

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            if not HasRepoPermissionAny(*self.perm_set)(
                    dbr.repo_name, 'get repo check'):
                continue
            name = dbr.repo_name
            yield {
                'name': name,
                'name_sort': name.lower(),
                'raw_name': name.lower(),
                'description': dbr.description,
                'description_sort': dbr.description.lower(),
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
class _PermCheckIterator(object):

    def __init__(self, obj_list, obj_attr, perm_set, perm_checker):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # yield only objects for which the checker grants one of perm_set
            name = getattr(db_obj, self.obj_attr, None)
            if self.perm_checker(*self.perm_set)(name, self.__class__.__name__):
                yield db_obj
class RepoGroupList(_PermCheckIterator):
    """Iterator over repo groups filtered by the repos-group permissions."""

    def __init__(self, db_repo_group_list, perm_set=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasReposGroupPermissionAny)
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups filtered by the user-group permissions."""

    def __init__(self, db_user_group_list, perm_set=None):
        perm_set = perm_set or ['usergroup.read', 'usergroup.write',
                                'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny)
r1982 | ||||
class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    def __get_repo(self, instance):
        """Resolve ``instance`` (Repository, id or repo name) to Repository.

        Returns None when ``instance`` is falsy (e.g. None).
        """
        if isinstance(instance, Repository):
            return instance
        if isinstance(instance, int) or safe_str(instance).isdigit():
            return Repository.get(instance)
        if isinstance(instance, basestring):
            return Repository.get_by_repo_name(instance)
        if instance:
            raise Exception('given object must be int, basestr or Instance'
                            ' of %s got %s' % (type(Repository),
                                               type(instance)))
r691 | @LazyProperty | |||
def repos_path(self): | ||||
r1716 | """ | |||
Get's the repositories root path from database | ||||
r691 | """ | |||
r811 | ||||
r691 | q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one() | |||
return q.ui_value | ||||
r1038 | def repo_scan(self, repos_path=None): | |||
r1716 | """ | |||
Listing of repositories in given path. This path should not be a | ||||
r691 | repository itself. Return a dictionary of repository objects | |||
r1203 | ||||
r691 | :param repos_path: path to directory containing repositories | |||
""" | ||||
r811 | ||||
r1038 | if repos_path is None: | |||
repos_path = self.repos_path | ||||
r1925 | log.info('scanning for repositories in %s' % repos_path) | |||
r1038 | baseui = make_ui('db') | |||
r1751 | repos = {} | |||
r691 | ||||
r877 | for name, path in get_filesystem_repos(repos_path, recursive=True): | |||
r1554 | # name need to be decomposed and put back together using the / | |||
# since this is internal storage separator for rhodecode | ||||
r3152 | name = Repository.normalize_repo_name(name) | |||
r1716 | ||||
r691 | try: | |||
r1751 | if name in repos: | |||
r691 | raise RepositoryError('Duplicate repository name %s ' | |||
r1554 | 'found in %s' % (name, path)) | |||
r691 | else: | |||
klass = get_backend(path[0]) | ||||
r710 | if path[0] == 'hg' and path[0] in BACKENDS.keys(): | |||
r1753 | repos[name] = klass(safe_str(path[1]), baseui=baseui) | |||
r691 | ||||
r710 | if path[0] == 'git' and path[0] in BACKENDS.keys(): | |||
r1751 | repos[name] = klass(path[1]) | |||
r691 | except OSError: | |||
continue | ||||
r3228 | log.debug('found %s paths with repositories' % (len(repos))) | |||
r1751 | return repos | |||
r691 | ||||
r2604 | def get_repos(self, all_repos=None, sort_key=None, simple=False): | |||
r1343 | """ | |||
Get all repos from db and for each repo create it's | ||||
r1213 | backend instance and fill that backed with information from database | |||
r1203 | ||||
r1343 | :param all_repos: list of repository names as strings | |||
give specific repositories list, good for filtering | ||||
r2604 | ||||
:param sort_key: initial sorting of repos | ||||
:param simple: use SimpleCachedList - one without the SCM info | ||||
r691 | """ | |||
r767 | if all_repos is None: | |||
r1366 | all_repos = self.sa.query(Repository)\ | |||
r1343 | .filter(Repository.group_id == None)\ | |||
r2354 | .order_by(func.lower(Repository.repo_name)).all() | |||
r2604 | if simple: | |||
repo_iter = SimpleCachedRepoList(all_repos, | ||||
repos_path=self.repos_path, | ||||
order_by=sort_key) | ||||
else: | ||||
repo_iter = CachedRepoList(all_repos, | ||||
repos_path=self.repos_path, | ||||
order_by=sort_key) | ||||
r691 | ||||
r1366 | return repo_iter | |||
r691 | ||||
r1982 | def get_repos_groups(self, all_groups=None): | |||
if all_groups is None: | ||||
all_groups = RepoGroup.query()\ | ||||
.filter(RepoGroup.group_parent_id == None).all() | ||||
r3714 | return [x for x in RepoGroupList(all_groups)] | |||
r1982 | ||||
r692 | def mark_for_invalidation(self, repo_name): | |||
r2147 | """ | |||
Mads Kiilerich
|
r3682 | Mark caches of this repo invalid in the database. | ||
r1203 | ||||
Mads Kiilerich
|
r3682 | :param repo_name: the repo for which caches should be marked invalid | ||
r692 | """ | |||
Mads Kiilerich
|
r3759 | CacheInvalidation.set_invalidate(repo_name) | ||
r3150 | repo = Repository.get_by_repo_name(repo_name) | |||
if repo: | ||||
repo.update_changeset_cache() | ||||
r692 | ||||
r734 | def toggle_following_repo(self, follow_repo_id, user_id): | |||
r692 | ||||
r734 | f = self.sa.query(UserFollowing)\ | |||
.filter(UserFollowing.follows_repo_id == follow_repo_id)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
if f is not None: | ||||
try: | ||||
self.sa.delete(f) | ||||
r735 | action_logger(UserTemp(user_id), | |||
'stopped_following_repo', | ||||
r747 | RepoTemp(follow_repo_id)) | |||
r734 | return | |||
r3631 | except Exception: | |||
r734 | log.error(traceback.format_exc()) | |||
raise | ||||
try: | ||||
f = UserFollowing() | ||||
f.user_id = user_id | ||||
f.follows_repo_id = follow_repo_id | ||||
self.sa.add(f) | ||||
r1722 | ||||
r735 | action_logger(UserTemp(user_id), | |||
'started_following_repo', | ||||
r747 | RepoTemp(follow_repo_id)) | |||
r3631 | except Exception: | |||
r734 | log.error(traceback.format_exc()) | |||
raise | ||||
r1213 | def toggle_following_user(self, follow_user_id, user_id): | |||
r734 | f = self.sa.query(UserFollowing)\ | |||
.filter(UserFollowing.follows_user_id == follow_user_id)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
if f is not None: | ||||
try: | ||||
self.sa.delete(f) | ||||
return | ||||
r3631 | except Exception: | |||
r734 | log.error(traceback.format_exc()) | |||
raise | ||||
try: | ||||
f = UserFollowing() | ||||
f.user_id = user_id | ||||
f.follows_user_id = follow_user_id | ||||
self.sa.add(f) | ||||
r3631 | except Exception: | |||
r734 | log.error(traceback.format_exc()) | |||
raise | ||||
r999 | def is_following_repo(self, repo_name, user_id, cache=False): | |||
r734 | r = self.sa.query(Repository)\ | |||
.filter(Repository.repo_name == repo_name).scalar() | ||||
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_repository == r)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
return f is not None | ||||
r999 | def is_following_user(self, username, user_id, cache=False): | |||
r1530 | u = User.get_by_username(username) | |||
r734 | ||||
f = self.sa.query(UserFollowing)\ | ||||
.filter(UserFollowing.follows_user == u)\ | ||||
.filter(UserFollowing.user_id == user_id).scalar() | ||||
return f is not None | ||||
r692 | ||||
r2440 | def get_followers(self, repo): | |||
repo = self._get_repo(repo) | ||||
r1282 | ||||
return self.sa.query(UserFollowing)\ | ||||
r2440 | .filter(UserFollowing.follows_repository == repo).count() | |||
r747 | ||||
r2440 | def get_forks(self, repo): | |||
repo = self._get_repo(repo) | ||||
return self.sa.query(Repository)\ | ||||
.filter(Repository.fork == repo).count() | ||||
r1282 | ||||
r2440 | def get_pull_requests(self, repo): | |||
repo = self._get_repo(repo) | ||||
return self.sa.query(PullRequest)\ | ||||
r3580 | .filter(PullRequest.other_repo == repo)\ | |||
.filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | ||||
r692 | ||||
r1755 | def mark_as_fork(self, repo, fork, user): | |||
repo = self.__get_repo(repo) | ||||
fork = self.__get_repo(fork) | ||||
r2629 | if fork and repo.repo_id == fork.repo_id: | |||
raise Exception("Cannot set repository as fork of itself") | ||||
r1755 | repo.fork = fork | |||
self.sa.add(repo) | ||||
return repo | ||||
r3826 | def _handle_rc_scm_extras(self, username, repo_name, repo_alias): | |||
from rhodecode import CONFIG | ||||
from rhodecode.lib.base import _get_ip_addr | ||||
try: | ||||
from pylons import request | ||||
environ = request.environ | ||||
except TypeError: | ||||
# we might use this outside of request context, let's fake the | ||||
# environ data | ||||
from webob import Request | ||||
environ = Request.blank('').environ | ||||
extras = { | ||||
'ip': _get_ip_addr(environ), | ||||
'username': username, | ||||
'action': 'push_local', | ||||
'repository': repo_name, | ||||
'scm': repo_alias, | ||||
'config': CONFIG['__file__'], | ||||
'server_url': get_server_url(environ), | ||||
'make_lock': None, | ||||
'locked_by': [None, None] | ||||
} | ||||
_set_extras(extras) | ||||
r3481 | def _handle_push(self, repo, username, action, repo_name, revisions): | |||
""" | ||||
Triggers push action hooks | ||||
:param repo: SCM repo | ||||
:param username: username who pushes | ||||
:param action: push/push_loca/push_remote | ||||
:param repo_name: name of repo | ||||
:param revisions: list of revisions that we pushed | ||||
""" | ||||
r3826 | self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias) | |||
r3478 | _scm_repo = repo._repo | |||
r3826 | # trigger push hook | |||
r3481 | if repo.alias == 'hg': | |||
r3478 | log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0]) | |||
r3481 | elif repo.alias == 'git': | |||
r3589 | log_push_action(None, _scm_repo, _git_revs=revisions) | |||
r3478 | ||||
r3481 | def _get_IMC_module(self, scm_type): | |||
""" | ||||
Returns InMemoryCommit class based on scm_type | ||||
:param scm_type: | ||||
""" | ||||
if scm_type == 'hg': | ||||
from rhodecode.lib.vcs.backends.hg import \ | ||||
MercurialInMemoryChangeset as IMC | ||||
elif scm_type == 'git': | ||||
from rhodecode.lib.vcs.backends.git import \ | ||||
GitInMemoryChangeset as IMC | ||||
return IMC | ||||
r2514 | def pull_changes(self, repo, username): | |||
dbrepo = self.__get_repo(repo) | ||||
r1508 | clone_uri = dbrepo.clone_uri | |||
if not clone_uri: | ||||
raise Exception("This repository doesn't have a clone uri") | ||||
r1530 | ||||
r1370 | repo = dbrepo.scm_instance | |||
r3478 | repo_name = dbrepo.repo_name | |||
r1114 | try: | |||
r2383 | if repo.alias == 'git': | |||
repo.fetch(clone_uri) | ||||
else: | ||||
repo.pull(clone_uri) | ||||
r3478 | self.mark_for_invalidation(repo_name) | |||
r3631 | except Exception: | |||
r1114 | log.error(traceback.format_exc()) | |||
raise | ||||
r1722 | def commit_change(self, repo, repo_name, cs, user, author, message, | |||
content, f_path): | ||||
r2684 | """ | |||
Commits changes | ||||
:param repo: SCM instance | ||||
""" | ||||
r3481 | user = self._get_user(user) | |||
IMC = self._get_IMC_module(repo.alias) | ||||
r1311 | ||||
# decoding here will force that we have proper encoded values | ||||
# in any other case this will throw exceptions and deny commit | ||||
r1401 | content = safe_str(content) | |||
path = safe_str(f_path) | ||||
r2199 | # message and author needs to be unicode | |||
# proper backend should then translate that into required type | ||||
message = safe_unicode(message) | ||||
author = safe_unicode(author) | ||||
r3836 | imc = IMC(repo) | |||
imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path))) | ||||
tip = imc.commit(message=message, | ||||
r2199 | author=author, | |||
parents=[cs], branch=cs.branch) | ||||
r1311 | ||||
self.mark_for_invalidation(repo_name) | ||||
r3478 | self._handle_push(repo, | |||
username=user.username, | ||||
action='push_local', | ||||
repo_name=repo_name, | ||||
revisions=[tip.raw_id]) | ||||
r2684 | return tip | |||
r1311 | ||||
r3840 | def create_nodes(self, user, repo, message, nodes, parent_cs=None, | |||
author=None, trigger_push_hook=True): | ||||
""" | ||||
Commits given multiple nodes into repo | ||||
:param user: RhodeCode User object or user_id, the commiter | ||||
:param repo: RhodeCode Repository object | ||||
:param message: commit message | ||||
:param nodes: mapping {filename:{'content':content},...} | ||||
:param parent_cs: parent changeset, can be empty than it's initial commit | ||||
:param author: author of commit, cna be different that commiter only for git | ||||
:param trigger_push_hook: trigger push hooks | ||||
:returns: new commited changeset | ||||
""" | ||||
r3481 | user = self._get_user(user) | |||
r3840 | scm_instance = repo.scm_instance_no_cache() | |||
r3481 | ||||
r3840 | processed_nodes = [] | |||
for f_path in nodes: | ||||
if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path: | ||||
raise NonRelativePathError('%s is not an relative path' % f_path) | ||||
if f_path: | ||||
f_path = os.path.normpath(f_path) | ||||
f_path = safe_str(f_path) | ||||
content = nodes[f_path]['content'] | ||||
# decoding here will force that we have proper encoded values | ||||
# in any other case this will throw exceptions and deny commit | ||||
if isinstance(content, (basestring,)): | ||||
content = safe_str(content) | ||||
elif isinstance(content, (file, cStringIO.OutputType,)): | ||||
content = content.read() | ||||
else: | ||||
raise Exception('Content is of unrecognized type %s' % ( | ||||
type(content) | ||||
)) | ||||
processed_nodes.append((f_path, content)) | ||||
r1530 | ||||
r2199 | message = safe_unicode(message) | |||
r3840 | commiter = user.full_contact | |||
author = safe_unicode(author) if author else commiter | ||||
r1483 | ||||
r3840 | IMC = self._get_IMC_module(scm_instance.alias) | |||
imc = IMC(scm_instance) | ||||
if not parent_cs: | ||||
parent_cs = EmptyChangeset(alias=scm_instance.alias) | ||||
if isinstance(parent_cs, EmptyChangeset): | ||||
r2199 | # EmptyChangeset means we we're editing empty repository | |||
r1483 | parents = None | |||
else: | ||||
r3840 | parents = [parent_cs] | |||
# add multiple nodes | ||||
for path, content in processed_nodes: | ||||
imc.add(FileNode(path, content=content)) | ||||
r1483 | ||||
r3840 | tip = imc.commit(message=message, | |||
author=author, | ||||
parents=parents, | ||||
branch=parent_cs.branch) | ||||
self.mark_for_invalidation(repo.repo_name) | ||||
if trigger_push_hook: | ||||
self._handle_push(scm_instance, | ||||
username=user.username, | ||||
action='push_local', | ||||
repo_name=repo.repo_name, | ||||
revisions=[tip.raw_id]) | ||||
r2684 | return tip | |||
r1483 | ||||
r1810 | def get_nodes(self, repo_name, revision, root_path='/', flat=True): | |||
""" | ||||
recursive walk in root dir and return a set of all path in that dir | ||||
based on repository walk function | ||||
:param repo_name: name of repository | ||||
:param revision: revision for which to list nodes | ||||
:param root_path: root path to list | ||||
:param flat: return as a list, if False returns a dict with decription | ||||
""" | ||||
_files = list() | ||||
_dirs = list() | ||||
try: | ||||
_repo = self.__get_repo(repo_name) | ||||
changeset = _repo.scm_instance.get_changeset(revision) | ||||
root_path = root_path.lstrip('/') | ||||
for topnode, dirs, files in changeset.walk(root_path): | ||||
for f in files: | ||||
_files.append(f.path if flat else {"name": f.path, | ||||
"type": "file"}) | ||||
for d in dirs: | ||||
_dirs.append(d.path if flat else {"name": d.path, | ||||
"type": "dir"}) | ||||
except RepositoryError: | ||||
log.debug(traceback.format_exc()) | ||||
raise | ||||
return _dirs, _files | ||||
    def get_unread_journal(self):
        """Return the total number of journal (UserLog) entries."""
        return self.sa.query(UserLog).count()
r2459 | ||||
def get_repo_landing_revs(self, repo=None): | ||||
""" | ||||
Generates select option with tags branches and bookmarks (for hg only) | ||||
grouped by type | ||||
:param repo: | ||||
""" | ||||
r2460 | ||||
r2459 | hist_l = [] | |||
r2460 | choices = [] | |||
r2459 | repo = self.__get_repo(repo) | |||
hist_l.append(['tip', _('latest tip')]) | ||||
r2460 | choices.append('tip') | |||
r2459 | if not repo: | |||
r2460 | return choices, hist_l | |||
r2459 | ||||
repo = repo.scm_instance | ||||
r2460 | ||||
r2459 | branches_group = ([(k, k) for k, v in | |||
repo.branches.iteritems()], _("Branches")) | ||||
hist_l.append(branches_group) | ||||
r2460 | choices.extend([x[0] for x in branches_group[0]]) | |||
r2459 | ||||
if repo.alias == 'hg': | ||||
bookmarks_group = ([(k, k) for k, v in | ||||
repo.bookmarks.iteritems()], _("Bookmarks")) | ||||
hist_l.append(bookmarks_group) | ||||
r2460 | choices.extend([x[0] for x in bookmarks_group[0]]) | |||
r2459 | ||||
tags_group = ([(k, k) for k, v in | ||||
repo.tags.iteritems()], _("Tags")) | ||||
hist_l.append(tags_group) | ||||
r2460 | choices.extend([x[0] for x in tags_group[0]]) | |||
r2459 | ||||
r2460 | return choices, hist_l | |||
r2618 | ||||
def install_git_hook(self, repo, force_create=False): | ||||
""" | ||||
Creates a rhodecode hook inside a git repository | ||||
:param repo: Instance of VCS repo | ||||
:param force_create: Create even if same name hook exists | ||||
""" | ||||
loc = jn(repo.path, 'hooks') | ||||
if not repo.bare: | ||||
loc = jn(repo.path, '.git', 'hooks') | ||||
if not os.path.isdir(loc): | ||||
os.makedirs(loc) | ||||
r2726 | tmpl_post = pkg_resources.resource_string( | |||
r2618 | 'rhodecode', jn('config', 'post_receive_tmpl.py') | |||
) | ||||
r2726 | tmpl_pre = pkg_resources.resource_string( | |||
'rhodecode', jn('config', 'pre_receive_tmpl.py') | ||||
) | ||||
r2618 | ||||
r2726 | for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]: | |||
_hook_file = jn(loc, '%s-receive' % h_type) | ||||
_rhodecode_hook = False | ||||
log.debug('Installing git hook in repo %s' % repo) | ||||
if os.path.exists(_hook_file): | ||||
# let's take a look at this hook, maybe it's rhodecode ? | ||||
log.debug('hook exists, checking if it is from rhodecode') | ||||
_HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER') | ||||
with open(_hook_file, 'rb') as f: | ||||
data = f.read() | ||||
matches = re.compile(r'(?:%s)\s*=\s*(.*)' | ||||
% 'RC_HOOK_VER').search(data) | ||||
if matches: | ||||
try: | ||||
ver = matches.groups()[0] | ||||
log.debug('got %s it is rhodecode' % (ver)) | ||||
_rhodecode_hook = True | ||||
r3631 | except Exception: | |||
r2726 | log.error(traceback.format_exc()) | |||
else: | ||||
# there is no hook in this dir, so we want to create one | ||||
_rhodecode_hook = True | ||||
r2618 | ||||
r2726 | if _rhodecode_hook or force_create: | |||
log.debug('writing %s hook file !' % h_type) | ||||
with open(_hook_file, 'wb') as f: | ||||
tmpl = tmpl.replace('_TMPL_', rhodecode.__version__) | ||||
f.write(tmpl) | ||||
os.chmod(_hook_file, 0755) | ||||
else: | ||||
log.debug('skipping writing hook file') | ||||