# -*- coding: utf-8 -*-
"""
    rhodecode.model.scm
    ~~~~~~~~~~~~~~~~~~~

    Scm model for RhodeCode

    :created_on: Apr 9, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import os
import time
import traceback
import logging
import cStringIO

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.nodes import FileNode

from rhodecode import BACKENDS
from rhodecode.lib import helpers as h
from rhodecode.lib.utils2 import safe_str
from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
    action_logger, EmptyChangeset, REMOVED_REPO_PAT
from rhodecode.model import BaseModel
from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
    UserFollowing, UserLog, User, RepoGroup

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class CachedRepoList(object):

    def __init__(self, db_repo_list, repos_path, order_by=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached
            # check permission at this level
            if not HasRepoPermissionAny(
                'repository.read', 'repository.write', 'repository.admin'
            )(dbr.repo_name, 'get repo check'):
                continue

            if scmr is None:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance' % dbr.repo_name
                )
                continue

            last_change = scmr.last_change
            tip = h.get_changeset_safe(scmr, 'tip')

            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description']
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d
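

# Illustrative usage sketch (assumed caller code): iterating the
# permission-filtered CachedRepoList yields plain dicts that can be sorted by
# any of the *_sort keys it carries, e.g.
#
#   repos = ScmModel().get_repos(sort_key='-name')
#   rows = sorted(repos, key=lambda d: d['name_sort'], reverse=repos.reversed)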


class GroupList(object):

    def __init__(self, db_repo_group_list):
        self.db_repo_group_list = db_repo_group_list

    def __len__(self):
        return len(self.db_repo_group_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbgr in self.db_repo_group_list:
            # check permission at this level
            if not HasReposGroupPermissionAny(
                'group.read', 'group.write', 'group.admin'
            )(dbgr.group_name, 'get group repo check'):
                continue

            yield dbgr


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    def __get_repo(self, instance):
        cls = Repository
        if isinstance(instance, cls):
            return instance
        elif isinstance(instance, int) or str(instance).isdigit():
            return cls.get(instance)
        elif isinstance(instance, basestring):
            return cls.get_by_repo_name(instance)
        elif instance:
            raise Exception('given object must be int, basestr or Instance'
                            ' of %s got %s' % (type(cls), type(instance)))

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()

        return q.ui_value

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s' % repos_path)

        baseui = make_ui('db')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # skip removed repos
            if REMOVED_REPO_PAT.match(name):
                continue

            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.url_sep().join(name.split(os.sep))

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                else:

                    klass = get_backend(path[0])

                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)

                    if path[0] == 'git' and path[0] in BACKENDS.keys():
                        repos[name] = klass(path[1])
            except OSError:
                continue

        return repos
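
    # Illustrative sketch (assumed filesystem path and repo name): the scan
    # maps each slash-joined repository name to an instantiated vcs backend
    # object, e.g.
    #
    #   repos = ScmModel().repo_scan('/srv/repos')
    #   backend = repos.get('group/myrepo')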

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repos from db and for each repo create its backend instance
        and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                        .filter(Repository.group_id == None)\
                        .order_by(Repository.repo_name).all()

        repo_iter = CachedRepoList(all_repos, repos_path=self.repos_path,
                                   order_by=sort_key)

        return repo_iter

    def get_repos_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        group_iter = GroupList(all_groups)

        return group_iter

    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into db for
        further global cache invalidation

        :param repo_name: repository for which the invalidation should take
            place
        """
        CacheInvalidation.set_invalidate(repo_name)
        CacheInvalidation.set_invalidate(repo_name + "_README")

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except:
            log.error(traceback.format_exc())
            raise
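
    # Illustrative sketch (assumed ids): calling this again with the same
    # arguments removes the following relation; the surrounding transaction
    # is expected to be committed by the caller.
    #
    #   ScmModel().toggle_following_repo(follow_repo_id=1, user_id=2)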

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo_id):
        if not isinstance(repo_id, int):
            repo_id = getattr(Repository.get_by_repo_name(repo_id), 'repo_id')

        return self.sa.query(UserFollowing)\
                .filter(UserFollowing.follows_repo_id == repo_id).count()

    def get_forks(self, repo_id):
        if not isinstance(repo_id, int):
            repo_id = getattr(Repository.get_by_repo_name(repo_id), 'repo_id')

        return self.sa.query(Repository)\
                .filter(Repository.fork_id == repo_id).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo_name, username):
        dbrepo = Repository.get_by_repo_name(repo_name)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance
        try:
            extras = {'ip': '',
                      'username': username,
                      'action': 'push_remote',
                      'repository': repo_name}

            # inject ui extra param to log this action via push logger
            for k, v in extras.items():
                repo._repo.ui.setconfig('rhodecode_extras', k, v)

            repo.pull(clone_uri)
            self.mark_for_invalidation(repo_name)
        except:
            log.error(traceback.format_exc())
            raise
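
    # Illustrative sketch (assumed repo name and username): pulls from the
    # clone uri stored for the repository and invalidates its cache
    # afterwards; raises if no clone uri is configured.
    #
    #   ScmModel().pull_changes('group/myrepo', username='admin')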

    def commit_change(self, repo, repo_name, cs, user, author, message,
                      content, f_path):

        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        message = safe_str(message)
        path = safe_str(f_path)
        author = safe_str(author)
        m = IMC(repo)
        m.change(FileNode(path, content))
        tip = m.commit(message=message,
                       author=author,
                       parents=[cs], branch=cs.branch)

        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs
        action_logger(user, action, repo_name)

        self.mark_for_invalidation(repo_name)
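
    # Illustrative sketch (assumed objects and values): a web-based file edit
    # roughly amounts to a call of this shape, where `db_repo` is the database
    # repository, `parent_cs` the parent changeset and `db_user` the database
    # user performing the edit.
    #
    #   ScmModel().commit_change(repo=db_repo.scm_instance,
    #                            repo_name='group/myrepo',
    #                            cs=parent_cs, user=db_user,
    #                            author='Some Author <author@example.com>',
    #                            message='edited README', content='new text',
    #                            f_path='README.rst')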

    def create_node(self, repo, repo_name, cs, user, author, message, content,
                    f_path):
        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit

        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)
            ))

        message = safe_str(message)
        path = safe_str(f_path)
        author = safe_str(author)
        m = IMC(repo)

        if isinstance(cs, EmptyChangeset):
            # EmptyChangeset means we're editing an empty repository
            parents = None
        else:
            parents = [cs]

        m.add(FileNode(path, content=content))
        tip = m.commit(message=message,
                       author=author,
                       parents=parents, branch=cs.branch)
        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs
        action_logger(user, action, repo_name)
        self.mark_for_invalidation(repo_name)

    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
        """
        Recursively walk the given root dir and return all paths found there,
        based on the repository walk function

        :param repo_name: name of repository
        :param revision: revision for which to list nodes
        :param root_path: root path to list
        :param flat: return paths as plain strings; if False, return dicts
            with a name and type description
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self.__get_repo(repo_name)
            changeset = _repo.scm_instance.get_changeset(revision)
            root_path = root_path.lstrip('/')
            for topnode, dirs, files in changeset.walk(root_path):
                for f in files:
                    _files.append(f.path if flat else {"name": f.path,
                                                       "type": "file"})
                for d in dirs:
                    _dirs.append(d.path if flat else {"name": d.path,
                                                      "type": "dir"})
        except RepositoryError:
            log.debug(traceback.format_exc())
            raise
        return _dirs, _files
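
    # Illustrative sketch (assumed repo name): returns directories first,
    # then files, either as plain paths or as {"name": ..., "type": ...}
    # dicts when flat=False.
    #
    #   dirs, files = ScmModel().get_nodes('group/myrepo', 'tip')
    #   dirs, files = ScmModel().get_nodes('group/myrepo', 'tip', flat=False)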

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()