# -*- coding: utf-8 -*-
"""
    rhodecode.model.scm
    ~~~~~~~~~~~~~~~~~~~

    Scm model for RhodeCode

    :created_on: Apr 9, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import re
import time
import traceback
import logging
import cStringIO
import pkg_resources
from os.path import dirname as dn, join as jn

from sqlalchemy import func
from pylons.i18n.translation import _

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.nodes import FileNode

from rhodecode import BACKENDS
from rhodecode.lib import helpers as h
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
    action_logger, EmptyChangeset, REMOVED_REPO_PAT
from rhodecode.model import BaseModel
from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
    UserFollowing, UserLog, User, RepoGroup, PullRequest

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class CachedRepoList(object):
    """
    Cached repo list. Uses an in-memory cache after initialization, which
    makes it very fast.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        # pre-populated cache_map to save executing select statements
        # for each repo
        cache_map = CacheInvalidation.get_cache_map()
        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(cache_map)
            # check permission at this level
            if not HasRepoPermissionAny(
                'repository.read', 'repository.write', 'repository.admin'
            )(dbr.repo_name, 'get repo check'):
                continue
            if scmr is None:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance' % dbr.repo_name
                )
                continue

            last_change = scmr.last_change
            tip = h.get_changeset_safe(scmr, 'tip')
            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d


class SimpleCachedRepoList(CachedRepoList):
    """
    Lighter version of CachedRepoList without the scm initialisation
    """

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            if not HasRepoPermissionAny(
                'repository.read', 'repository.write', 'repository.admin'
            )(dbr.repo_name, 'get repo check'):
                continue
            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d


class GroupList(object):
    def __init__(self, db_repo_group_list):
        self.db_repo_group_list = db_repo_group_list

    def __len__(self):
        return len(self.db_repo_group_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbgr in self.db_repo_group_list:
            # check permission at this level
            if not HasReposGroupPermissionAny(
                'group.read', 'group.write', 'group.admin'
            )(dbgr.group_name, 'get group repo check'):
                continue
            yield dbgr


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    def __get_repo(self, instance):
        cls = Repository
        if isinstance(instance, cls):
            return instance
        elif isinstance(instance, int) or str(instance).isdigit():
            return cls.get(instance)
        elif isinstance(instance, basestring):
            return cls.get_by_repo_name(instance)
        elif instance:
            raise Exception('given object must be int, basestring or '
                            'instance of %s, got %s'
                            % (type(cls), type(instance)))

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
        return q.ui_value

    def repo_scan(self, repos_path=None):
        """
        Lists repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects.

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path
        log.info('scanning for repositories in %s' % repos_path)
        baseui = make_ui('db')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # skip removed repos
            if REMOVED_REPO_PAT.match(name):
                continue

            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.url_sep().join(name.split(os.sep))

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                else:
                    klass = get_backend(path[0])
                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)

                    if path[0] == 'git' and path[0] in BACKENDS.keys():
                        repos[name] = klass(path[1])
            except OSError:
                continue
        return repos
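
    # Illustrative usage sketch for repo_scan (kept as a comment so nothing
    # runs at import time). It assumes a configured pylons/SQLAlchemy
    # environment, for example inside `paster shell production.ini`:
    #
    #   scm = ScmModel()
    #   repos = scm.repo_scan()        # scans self.repos_path by default
    #   print sorted(repos.keys())     # repo names, using '/' as separator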

    def get_repos(self, all_repos=None, sort_key=None, simple=False):
        """
        Get all repos from db and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repos
        :param simple: use SimpleCachedRepoList - one without the SCM info
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        if simple:
            repo_iter = SimpleCachedRepoList(all_repos,
                                             repos_path=self.repos_path,
                                             order_by=sort_key)
        else:
            repo_iter = CachedRepoList(all_repos,
                                       repos_path=self.repos_path,
                                       order_by=sort_key)

        return repo_iter
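
    # Illustrative sketch (comment only, not executed) of how a caller might
    # consume get_repos(). The sort key name is an assumption taken from the
    # dict keys produced by CachedRepoList above:
    #
    #   scm = ScmModel()
    #   repos_list = scm.get_repos(sort_key='name_sort', simple=False)
    #   for repo_dict in repos_list:
    #       print repo_dict['name'], repo_dict['last_change']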

    def get_repos_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        group_iter = GroupList(all_groups)
        return group_iter

    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into db for
        further global cache invalidation

        :param repo_name: repo name for which invalidation should take place
        """
        CacheInvalidation.set_invalidate(repo_name)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()
        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except:
                log.error(traceback.format_exc())
                raise
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except:
            log.error(traceback.format_exc())
            raise
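
    # Illustrative sketch (comment only): toggling and checking the follow
    # state of a repository. The ids and repo name are hypothetical, and the
    # session commit is assumed to be handled by the caller, since the
    # method above only adds/deletes objects on the session:
    #
    #   scm = ScmModel()
    #   scm.toggle_following_repo(follow_repo_id=1, user_id=2)
    #   scm.sa.commit()
    #   scm.is_following_repo('some-repo', user_id=2)  # -> True / False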

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()
        if f is not None:
            try:
                self.sa.delete(f)
                return
            except:
                log.error(traceback.format_exc())
                raise
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()
        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()
        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.other_repo == repo).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")
        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username):
        dbrepo = self.__get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance
        try:
            extras = {
                'ip': '',
                'username': username,
                'action': 'push_remote',
                'repository': dbrepo.repo_name,
                'scm': repo.alias,
            }
            Repository.inject_ui(repo, extras=extras)

            if repo.alias == 'git':
                repo.fetch(clone_uri)
            else:
                repo.pull(clone_uri)
            self.mark_for_invalidation(dbrepo.repo_name)
        except:
            log.error(traceback.format_exc())
            raise
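
    # Illustrative sketch (comment only): pulling upstream changes for a
    # mirrored repository. The repo name and username are hypothetical, and
    # the repository is assumed to have clone_uri set in the database:
    #
    #   scm = ScmModel()
    #   scm.pull_changes('repo-name', username='admin')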

    def commit_change(self, repo, repo_name, cs, user, author, message,
                      content, f_path):

        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import \
                MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import \
                GitInMemoryChangeset as IMC

        # decoding here will force that we have properly encoded values;
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # the proper backend should then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        m = IMC(repo)
        m.change(FileNode(path, content))
        tip = m.commit(message=message,
                       author=author,
                       parents=[cs], branch=cs.branch)

        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs
        action_logger(user, action, repo_name)
        self.mark_for_invalidation(repo_name)
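
    # Illustrative sketch (comment only): committing an edit of a single
    # file through the in-memory changeset path used above. The repo name,
    # user, file path and content are hypothetical; the tip changeset is
    # used as the parent of the new commit:
    #
    #   db_repo = Repository.get_by_repo_name('repo-name')
    #   scm_repo = db_repo.scm_instance
    #   tip = scm_repo.get_changeset()
    #   ScmModel().commit_change(scm_repo, 'repo-name', tip, user=cur_user,
    #                            author=u'John Doe <jd@example.com>',
    #                            message=u'edited README',
    #                            content='new file content',
    #                            f_path='README.rst')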

    def create_node(self, repo, repo_name, cs, user, author, message, content,
                    f_path):
        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import \
                MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import \
                GitInMemoryChangeset as IMC
        # decoding here will force that we have properly encoded values;
        # in any other case this will throw exceptions and deny the commit

        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)
            ))

        message = safe_unicode(message)
        author = safe_unicode(author)
        path = safe_str(f_path)
        m = IMC(repo)
        if isinstance(cs, EmptyChangeset):
            # EmptyChangeset means we're editing an empty repository
            parents = None
        else:
            parents = [cs]
        m.add(FileNode(path, content=content))
        tip = m.commit(message=message,
                       author=author,
                       parents=parents, branch=cs.branch)
        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs
        action_logger(user, action, repo_name)
        self.mark_for_invalidation(repo_name)

    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
        """
        Recursive walk in root dir and return a set of all paths found,
        based on the repository walk function

        :param repo_name: name of repository
        :param revision: revision for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list of paths, if False returns a dict with
            description
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self.__get_repo(repo_name)
            changeset = _repo.scm_instance.get_changeset(revision)
            root_path = root_path.lstrip('/')
            for topnode, dirs, files in changeset.walk(root_path):
                for f in files:
                    _files.append(f.path if flat else {"name": f.path,
                                                       "type": "file"})
                for d in dirs:
                    _dirs.append(d.path if flat else {"name": d.path,
                                                      "type": "dir"})
        except RepositoryError:
            log.debug(traceback.format_exc())
            raise
        return _dirs, _files
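
    # Illustrative sketch (comment only): listing every path in a revision.
    # The repo name and revision are hypothetical:
    #
    #   dirs, files = ScmModel().get_nodes('repo-name', 'tip', flat=True)
    #   # with flat=False the lists hold dicts like
    #   # {"name": path, "type": "file"} / {"name": path, "type": "dir"}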

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg
        only) grouped by type

        :param repo:
        :type repo:
        """

        hist_l = []
        choices = []
        repo = self.__get_repo(repo)
        hist_l.append(['tip', _('latest tip')])
        choices.append('tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance

        branches_group = ([(k, k) for k, v in
                           repo.branches.iteritems()], _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = ([(k, k) for k, v in
                                repo.bookmarks.iteritems()], _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = ([(k, k) for k, v in
                       repo.tags.iteritems()], _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
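
    # Illustrative sketch (comment only): the two return values are meant to
    # feed a select widget - `choices` for validation, `hist_l` for display.
    # The repo name and the 'default' branch shown below are hypothetical:
    #
    #   choices, hist_l = ScmModel().get_repo_landing_revs(repo='repo-name')
    #   # choices -> ['tip', 'default', ...]
    #   # hist_l  -> [['tip', 'latest tip'],
    #   #             ([('default', 'default'), ...], 'Branches'), ...]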

    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if a hook with the same name exists
        """
        loc = jn(repo.path, 'hooks')
        if not repo.bare:
            loc = jn(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc)

        tmpl = pkg_resources.resource_string(
            'rhodecode', jn('config', 'post_receive_tmpl.py')
        )

        _hook_file = jn(loc, 'post-receive')
        _rhodecode_hook = False
        log.debug('Installing git hook in repo %s' % repo)

        if os.path.exists(_hook_file):
            # let's take a look at this hook, maybe it's rhodecode ?
            log.debug('hook exists, checking if it is from rhodecode')
            with open(_hook_file, 'rb') as f:
                data = f.read()
                matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                     % 'RC_HOOK_VER').search(data)
                if matches:
                    try:
                        ver = matches.groups()[0]
                        log.debug('got %s, it is a rhodecode hook' % ver)
                        _rhodecode_hook = True
                    except:
                        log.error(traceback.format_exc())

        if _rhodecode_hook or force_create:
            log.debug('writing hook file !')
            with open(_hook_file, 'wb') as f:
                tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                f.write(tmpl)
            os.chmod(_hook_file, 0755)
        else:
            log.debug('skipping writing hook file')
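
    # Illustrative sketch (comment only): (re)installing the post-receive
    # hook for every git repository known to RhodeCode. It reuses repo_scan()
    # from above; filtering on the 'git' alias is an assumption about how a
    # caller would pick git repositories:
    #
    #   scm = ScmModel()
    #   for name, vcs_repo in scm.repo_scan().items():
    #       if vcs_repo.alias == 'git':
    #           scm.install_git_hook(vcs_repo, force_create=True)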