# -*- coding: utf-8 -*-
"""
    rhodecode.model.scm
    ~~~~~~~~~~~~~~~~~~~

    Scm model for RhodeCode

    :created_on: Apr 9, 2010
    :author: marcink
    :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of the license.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import os
import time
import traceback
import logging

from mercurial import ui
from sqlalchemy.exc import DatabaseError
from sqlalchemy.orm import make_transient

from beaker.cache import cache_region, region_invalidate

from vcs import get_backend
from vcs.utils.helpers import get_scm
from vcs.exceptions import RepositoryError, VCSError
from vcs.utils.lazy import LazyProperty

from rhodecode import BACKENDS
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import HasRepoPermissionAny
from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
    action_logger
from rhodecode.model import BaseModel
from rhodecode.model.user import UserModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
    UserFollowing, UserLog
from rhodecode.model.caching_query import FromCache

log = logging.getLogger(__name__)


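# UserTemp and RepoTemp are lightweight, id-only stand-ins for the ORM
# objects; they are handed to action_logger() below so that follow/unfollow
# events can be journaled without loading full User/Repository rows.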
class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class ScmModel(BaseModel):
    """Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """Gets the repositories root path from the database
        """

        q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
        return q.ui_value

    def repo_scan(self, repos_path=None):
        """Lists repositories found in the given path. This path should not
        be a repository itself. Returns a dictionary of repository objects.

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)
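        # a mercurial ui object built from the ui settings kept in the
        # database (make_ui('db')); it is passed as baseui to every hg
        # backend instantiated below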
        baseui = make_ui('db')
        repos_list = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            try:
                if name in repos_list:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                else:
                    klass = get_backend(path[0])

                    if path[0] == 'hg' and path[0] in BACKENDS:
                        repos_list[name] = klass(path[1], baseui=baseui)

                    if path[0] == 'git' and path[0] in BACKENDS:
                        repos_list[name] = klass(path[1])
            except OSError:
                continue

        return repos_list

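    # NOTE: get_repos is a generator - for every repository it yields a
    # summary dict combining data from the vcs backend (tip, last change,
    # archives) with the corresponding database record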
    def get_repos(self, all_repos=None):
        """Gets all repos from the db, creates a backend instance for each
        repo and fills that backend with information from the database.

        :param all_repos: give specific repositories list, good for filtering
            this has to be a list of just the repository names
        """

        if all_repos is None:
            all_repos = [r.repo_name for r in self.sa.query(Repository)\
                         .order_by(Repository.repo_name).all()]

        # get the cache keys (repository names) currently marked for
        # invalidation
        invalidation_list = [str(x.cache_key) for x in \
                             self.sa.query(CacheInvalidation.cache_key)\
                             .filter(CacheInvalidation.cache_active == False)\
                             .all()]

        for r_name in all_repos:
            r_dbr = self.get(r_name, invalidation_list)
            if r_dbr is not None:
                repo, dbrepo = r_dbr

                last_change = repo.last_change
                tip = h.get_changeset_safe(repo, 'tip')

                tmp_d = {}
                tmp_d['name'] = dbrepo.repo_name
                tmp_d['name_sort'] = tmp_d['name'].lower()
                tmp_d['description'] = dbrepo.description
                tmp_d['description_sort'] = tmp_d['description']
                tmp_d['last_change'] = last_change
                tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
                tmp_d['tip'] = tip.raw_id
                tmp_d['tip_sort'] = tip.revision
                tmp_d['rev'] = tip.revision
                tmp_d['contact'] = dbrepo.user.full_contact
                tmp_d['contact_sort'] = tmp_d['contact']
                tmp_d['owner_sort'] = tmp_d['contact']
                tmp_d['repo_archives'] = list(repo._get_archives())
                tmp_d['last_msg'] = tip.message
                tmp_d['repo'] = repo
                tmp_d['dbrepo'] = dbrepo.get_dict()
                tmp_d['dbrepo_fork'] = dbrepo.fork.get_dict() if dbrepo.fork else {}

                yield tmp_d

    def get(self, repo_name, invalidation_list=None, retval='all'):
        """Returns a tuple of (Repository, DbRepository).

        Gets the repository of the given name, creates a backend instance
        for it and populates that instance with data from the database.

        :param repo_name: name of the repository
        :param invalidation_list: if an invalidation list is given, the get
            method should not manually check if this repository needs
            invalidation and should just invalidate the repositories in
            that list
        :param retval: string specifying what to return, one of 'repo',
            'dbrepo' or 'all'. If 'repo' or 'dbrepo' is given, it'll just
            lazy load the chosen type and return None for the other
        """

        if not HasRepoPermissionAny('repository.read', 'repository.write',
                                    'repository.admin')(repo_name, 'get repo check'):
            return

        #======================================================================
        # CACHE FUNCTION
        #======================================================================
        @cache_region('long_term')
        def _get_repo(repo_name):
            repo_path = os.path.join(self.repos_path, repo_name)

            try:
                alias = get_scm(repo_path)[0]
                log.debug('Creating instance of %s repository', alias)
                backend = get_backend(alias)
            except VCSError:
                log.error(traceback.format_exc())
                log.error('Perhaps this repository is in the db but not on '
                          'the filesystem; run "rescan repositories" with '
                          'the "destroy old data" option from the admin panel')
                return

            if alias == 'hg':
                repo = backend(repo_path, create=False, baseui=make_ui('db'))
                # skip hidden web repository
                if repo._get_hidden():
                    return
            else:
                repo = backend(repo_path, create=False)

            return repo

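        # decide whether the beaker cache for this repository has to be
        # dropped; when an invalidation_list is passed in (e.g. from
        # get_repos) the invalidation table is only queried for names that
        # are actually on that list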
        pre_invalidate = True
        dbinvalidate = False

        if invalidation_list is not None:
            pre_invalidate = repo_name in invalidation_list

        if pre_invalidate:
            # this returns the object to invalidate
            invalidate = self._should_invalidate(repo_name)
            if invalidate:
                log.info('invalidating cache for repository %s', repo_name)
                region_invalidate(_get_repo, None, repo_name)
                self._mark_invalidated(invalidate)
                dbinvalidate = True

        r, dbr = None, None
        if retval in ('repo', 'all'):
            r = _get_repo(repo_name)
        if retval in ('dbrepo', 'all'):
            dbr = RepoModel().get_full(repo_name, cache=True,
                                       invalidate=dbinvalidate)

        return r, dbr

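    # usage sketch for get() (assuming a configured SQLAlchemy session, as
    # with the other models in this module):
    #   repo, dbrepo = ScmModel().get('somerepo')             # both objects
    #   repo, _ = ScmModel().get('somerepo', retval='repo')   # backend only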

    def mark_for_invalidation(self, repo_name):
        """Puts a cache invalidation task into the db for later global
        cache invalidation.

        :param repo_name: the repo for which invalidation should take place
        """

        log.debug('marking %s for invalidation', repo_name)
        cache = self.sa.query(CacheInvalidation)\
            .filter(CacheInvalidation.cache_key == repo_name).scalar()

        if cache:
            # mark this cache as inactive
            cache.cache_active = False
        else:
            log.debug('cache key not found in invalidation db -> creating one')
            cache = CacheInvalidation(repo_name)

        try:
            self.sa.add(cache)
            self.sa.commit()
        except (DatabaseError,):
            log.error(traceback.format_exc())
            self.sa.rollback()

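    # follow/unfollow toggles: if a UserFollowing row already exists it is
    # deleted (unfollow), otherwise one is created (follow); repository
    # follows are additionally written to the journal via action_logger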
    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                self.sa.commit()
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                self.sa.rollback()
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
            self.sa.commit()
            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                self.sa.commit()
                return
            except Exception:
                log.error(traceback.format_exc())
                self.sa.rollback()
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
            self.sa.commit()
        except Exception:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = UserModel(self.sa).get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

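    # get_followers / get_forks accept either a numeric repository id or a
    # repository name; names are resolved through RepoModel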
    def get_followers(self, repo_id):
        if isinstance(repo_id, int):
            return self.sa.query(UserFollowing)\
                .filter(UserFollowing.follows_repo_id == repo_id).count()
        else:
            return self.sa.query(UserFollowing)\
                .filter(UserFollowing.follows_repository \
                        == RepoModel().get_by_repo_name(repo_id)).count()

    def get_forks(self, repo_id):
        if isinstance(repo_id, int):
            return self.sa.query(Repository)\
                .filter(Repository.fork_id == repo_id).count()
        else:
            return self.sa.query(Repository)\
                .filter(Repository.fork \
                        == RepoModel().get_by_repo_name(repo_id)).count()

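    # pull_changes pulls from the clone_uri stored for this repository in
    # the db; the 'rhodecode_extras' ui values injected below let the push
    # logger record the action under `username`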
    def pull_changes(self, repo_name, username):
        repo, dbrepo = self.get(repo_name, retval='all')

        try:
            extras = {'ip': '',
                      'username': username,
                      'action': 'push_remote',
                      'repository': repo_name}

            # inject ui extra params to log this action via push logger
            for k, v in extras.items():
                repo._repo.ui.setconfig('rhodecode_extras', k, v)

            repo.pull(dbrepo.clone_uri)
            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

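    # cache invalidation lifecycle: mark_for_invalidation() flips the
    # CacheInvalidation row for a repo to cache_active=False,
    # _should_invalidate() returns that row while it is inactive, and
    # _mark_invalidated() sets it back to True once the cache was refreshed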
    def _should_invalidate(self, repo_name):
        """Looks up the database for invalidation signals for this repo_name.

        :param repo_name:
        """

        ret = self.sa.query(CacheInvalidation)\
            .filter(CacheInvalidation.cache_key == repo_name)\
            .filter(CacheInvalidation.cache_active == False)\
            .scalar()

        return ret

    def _mark_invalidated(self, cache_key):
        """Marks the given cache entry as already invalidated by setting
        cache_active back to True.

        :param cache_key:
        """

        if cache_key:
            log.debug('marking %s as already invalidated', cache_key)
            try:
                cache_key.cache_active = True
                self.sa.add(cache_key)
                self.sa.commit()
            except (DatabaseError,):
                log.error(traceback.format_exc())
                self.sa.rollback()