utils.py
688 lines
| 21.5 KiB
| text/x-python
|
PythonLexer
r783 | # -*- coding: utf-8 -*- | |||
""" | ||||
r833 | rhodecode.lib.utils | |||
~~~~~~~~~~~~~~~~~~~ | ||||
r783 | ||||
Utilities library for RhodeCode | ||||
r1203 | ||||
r783 | :created_on: Apr 18, 2010 | |||
:author: marcink | ||||
r1824 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |||
r783 | :license: GPLv3, see COPYING for more details. | |||
""" | ||||
r1206 | # This program is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU General Public License as published by | ||||
# the Free Software Foundation, either version 3 of the License, or | ||||
# (at your option) any later version. | ||||
r1203 | # | |||
r547 | # This program is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
r1203 | # | |||
r547 | # You should have received a copy of the GNU General Public License | |||
r1206 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
r783 | ||||
import os | ||||
r2069 | import re | |||
r783 | import logging | |||
import datetime | ||||
import traceback | ||||
r1022 | import paste | |||
import beaker | ||||
r1717 | import tarfile | |||
import shutil | ||||
from os.path import abspath | ||||
r1354 | from os.path import dirname as dn, join as jn | |||
r1022 | ||||
from paste.script.command import Command, BadCommand | ||||
r633 | ||||
r1505 | from mercurial import ui, config | |||
r783 | ||||
r1022 | from webhelpers.text import collapse, remove_formatting, strip_tags | |||
r785 | ||||
r2007 | from rhodecode.lib.vcs import get_backend | |||
from rhodecode.lib.vcs.backends.base import BaseChangeset | ||||
from rhodecode.lib.vcs.utils.lazy import LazyProperty | ||||
from rhodecode.lib.vcs.utils.helpers import get_scm | ||||
from rhodecode.lib.vcs.exceptions import VCSError | ||||
r783 | ||||
r1669 | from rhodecode.lib.caching_query import FromCache | |||
r631 | from rhodecode.model import meta | |||
r1717 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ | |||
r2147 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm,\ | |||
CacheInvalidation | ||||
r1734 | from rhodecode.model.meta import Session | |||
r1982 | from rhodecode.model.repos_group import ReposGroupModel | |||
r2109 | from rhodecode.lib.utils2 import safe_str, safe_unicode | |||
from rhodecode.lib.vcs.utils.fakemod import create_module | ||||
r756 | ||||
log = logging.getLogger(__name__)

# matches directory names of repositories that were soft-removed by renaming
# them to rm__YYYYMMDD_HHMMSS_ffffff__<name> (see the scanning helpers below)
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
r547 | ||||
def recursive_replace(str_, replace=' '):
    """
    Collapse every run of consecutive `replace` characters in `str_`
    down to a single occurrence.

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
    'Mighty-Mighty-Bo-sstones'
    """
    # The original implementation recursed once per collapsing pass, which
    # could hit the interpreter recursion limit on pathological input;
    # a loop is equivalent and bounded only by the string length.
    doubled = replace * 2
    while doubled in str_:
        str_ = str_.replace(doubled, replace)
    return str_
r1022 | ||||
r1224 | ||||
def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    # characters that are not allowed in repository names
    forbidden = """=[]\;'"<>,/~!@#$%^&*()+{}|: """

    slug = strip_tags(remove_formatting(value))
    for ch in forbidden:
        slug = slug.replace(ch, '-')
    # squash runs of dashes produced by consecutive forbidden chars
    return collapse(recursive_replace(slug, '-'), '-')
r1224 | ||||
def get_repo_slug(request):
    """
    Extract the repository name from the pylons routing dict of `request`,
    stripping any trailing slash.  Returns None when no repo is routed.
    """
    repo_name = request.environ['pylons.routes_dict'].get('repo_name')
    return repo_name.rstrip('/') if repo_name else repo_name
r547 | ||||
r1224 | ||||
r1982 | def get_repos_group_slug(request): | |||
r2055 | _group = request.environ['pylons.routes_dict'].get('group_name') | |||
if _group: | ||||
_group = _group.rstrip('/') | ||||
return _group | ||||
r1982 | ||||
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: if True, commit the session after adding the log entry

    """

    if not sa:
        sa = meta.Session

    try:
        # resolve the acting user from either a user object or a username
        if hasattr(user, 'user_id'):
            user_obj = user
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide user object or username')

        # resolve the repository; when neither an object nor a name is
        # given, an empty name and no repository link are stored
        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info(
            'Adding user %s, action %s on %s' % (user_obj, action,
                                                 safe_unicode(repo))
        )
        if commit:
            sa.commit()
    except:
        # log the full traceback but re-raise so callers still see the error
        log.error(traceback.format_exc())
        raise
r604 | ||||
r1224 | ||||
def get_repos(path, recursive=False):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)

    def _get_repos(p):
        # NOTE(review): W_OK (write access) is required before scanning a
        # directory -- presumably read-only locations are deliberately
        # skipped; confirm read-only repos are really meant to be ignored
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
            try:
                scm_info = get_scm(cur_path)
                # yield the repo name relative to the scan root plus
                # the (type, path) tuple returned by get_scm
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    # returns a generator; nothing is scanned until it is iterated
    return _get_repos(path)
r631 | ||||
r1224 | ||||
r1507 | def is_valid_repo(repo_name, base_path): | |||
r761 | """ | |||
r1505 | Returns True if given path is a valid repository False otherwise | |||
r2100 | ||||
r761 | :param repo_name: | |||
:param base_path: | ||||
r1203 | ||||
r1505 | :return True: if given path is a valid repository | |||
r761 | """ | |||
r2100 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) | |||
r1528 | ||||
r547 | try: | |||
r1505 | get_scm(full_path) | |||
return True | ||||
except VCSError: | ||||
r547 | return False | |||
r1505 | ||||
r1982 | ||||
r1507 | def is_valid_repos_group(repos_group_name, base_path): | |||
r1505 | """ | |||
Returns True if given path is a repos group False otherwise | ||||
r1818 | ||||
r1505 | :param repo_name: | |||
:param base_path: | ||||
""" | ||||
r2100 | full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name)) | |||
r1528 | ||||
r1505 | # check if it's not a repo | |||
r1507 | if is_valid_repo(repos_group_name, base_path): | |||
r1505 | return False | |||
r1528 | ||||
r2497 | try: | |||
# we need to check bare git repos at higher level | ||||
# since we might match branches/hooks/info/objects or possible | ||||
# other things inside bare git repo | ||||
get_scm(os.path.dirname(full_path)) | ||||
return False | ||||
except VCSError: | ||||
pass | ||||
r1505 | # check if it's a valid path | |||
if os.path.isdir(full_path): | ||||
r547 | return True | |||
r1528 | ||||
r1505 | return False | |||
r1528 | ||||
r1982 | ||||
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
    """
    Ask a yes/no question on the console and return the answer as a bool.

    :param prompt: question shown to the user
    :param retries: number of unrecognized answers tolerated before giving up
    :param complaint: message printed after each unrecognized answer
    :return: True for yes-like answers, False for no-like answers
    :raises IOError: when the user exceeds the allowed retries
    """
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint
r604 | ||||
# propagated from mercurial documentation
# hgrc config sections copied into the ui object built by make_ui()
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]
r604 | ||||
r1224 | ||||
def make_ui(read_from='file', path=None, checkpaths=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
        NOTE(review): this parameter is currently unused in the body
    :param read_from: read from 'file' or 'db'
    :return: configured mercurial ui object, or False when `read_from`
        is 'file' but `path` does not point at an existing file
    """
    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        # copy over only the sections mercurial itself documents
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
                baseui.setconfig(section, k, v)

    elif read_from == 'db':
        sa = meta.Session
        # short-lived SQL cache avoids re-reading settings on every request
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        # release the thread-local session used for the read
        meta.Session.remove()
    return baseui
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config: pylons config object to update in place
    """
    app_settings = RhodeCodeSetting.get_app_settings()

    for key, value in app_settings.items():
        config[key] = value
r1224 | ||||
def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    prefix = 'get_repo_cached_'
    if cache_key.startswith(prefix):
        repo_name = cache_key.split(prefix)[-1]
        ScmModel().mark_for_invalidation(repo_name)
r604 | ||||
r1224 | ||||
class EmptyChangeset(BaseChangeset):
    """
    A dummy empty changeset. It's possible to pass hash when creating
    an EmptyChangeset
    """

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None):
        # raw hash reported for this (non-existing) changeset
        self._empty_cs = cs
        # -1 marks the position before the first real revision
        self.revision = -1
        self.message = ''
        self.author = ''
        self.date = ''
        self.repository = repo
        self.requested_revision = requested_revision
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """

        return self._empty_cs

    @LazyProperty
    def branch(self):
        # default branch name for the backend selected via `alias`
        return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # shortened (12-char) form of the raw hash
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        # there is no history, so this changeset "changed" every file
        return self

    def get_file_content(self, path):
        # an empty repository has no file content
        return u''

    def get_file_size(self, path):
        return 0
r604 | ||||
r1224 | ||||
r2120 | def map_groups(path): | |||
r1717 | """ | |||
r2120 | Given a full path to a repository, create all nested groups that this | |||
repo is inside. This function creates parent-child relationships between | ||||
groups and creates default perms for all new groups. | ||||
r1203 | ||||
r2120 | :param paths: full path to repository | |||
r878 | """ | |||
r1749 | sa = meta.Session | |||
r2120 | groups = path.split(Repository.url_sep()) | |||
r878 | parent = None | |||
group = None | ||||
r1538 | ||||
# last element is repo in nested groups structure | ||||
groups = groups[:-1] | ||||
r1982 | rgm = ReposGroupModel(sa) | |||
r1538 | for lvl, group_name in enumerate(groups): | |||
group_name = '/'.join(groups[:lvl] + [group_name]) | ||||
r1982 | group = RepoGroup.get_by_group_name(group_name) | |||
desc = '%s group' % group_name | ||||
r2069 | # skip folders that are now removed repos | |||
if REMOVED_REPO_PAT.match(group_name): | ||||
break | ||||
r878 | if group is None: | |||
r2120 | log.debug('creating group level: %s group_name: %s' % (lvl, | |||
group_name)) | ||||
r1633 | group = RepoGroup(group_name, parent) | |||
r1982 | group.group_description = desc | |||
r878 | sa.add(group) | |||
r1982 | rgm._create_default_perms(group) | |||
r2120 | sa.flush() | |||
r878 | parent = group | |||
return group | ||||
r1224 | ||||
r547 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): | |||
r1634 | """ | |||
maps all repos given in initial_repo_list, non existing repositories | ||||
r878 | are created, if remove_obsolete is True it also check for db entries | |||
that are not in initial_repo_list and removes them. | ||||
r1203 | ||||
r878 | :param initial_repo_list: list of repositories found by scanning methods | |||
:param remove_obsolete: check for obsolete entries in database | ||||
r547 | """ | |||
r1634 | from rhodecode.model.repo import RepoModel | |||
r1749 | sa = meta.Session | |||
r692 | rm = RepoModel() | |||
r547 | user = sa.query(User).filter(User.admin == True).first() | |||
r1634 | if user is None: | |||
r1717 | raise Exception('Missing administrative account !') | |||
r1039 | added = [] | |||
r1716 | ||||
r631 | for name, repo in initial_repo_list.items(): | |||
r2120 | group = map_groups(name) | |||
r735 | if not rm.get_by_repo_name(name, cache=False): | |||
r1976 | log.info('repository %s not found creating default' % name) | |||
r1039 | added.append(name) | |||
r547 | form_data = { | |||
r1982 | 'repo_name': name, | |||
'repo_name_full': name, | ||||
'repo_type': repo.alias, | ||||
'description': repo.description \ | ||||
if repo.description != 'unknown' else '%s repository' % name, | ||||
'private': False, | ||||
r2459 | 'group_id': getattr(group, 'group_id', None), | |||
r2511 | 'landing_rev': 'tip', | |||
'clone_uri': None, | ||||
'repo_group': None, | ||||
r1982 | } | |||
r547 | rm.create(form_data, user, just_db=True) | |||
r1723 | sa.commit() | |||
r1039 | removed = [] | |||
r547 | if remove_obsolete: | |||
r2147 | # remove from database those repositories that are not in the filesystem | |||
r547 | for repo in sa.query(Repository).all(): | |||
if repo.repo_name not in initial_repo_list.keys(): | ||||
r2147 | log.debug("Removing non existing repository found in db %s" % | |||
repo.repo_name) | ||||
r1039 | removed.append(repo.repo_name) | |||
r547 | sa.delete(repo) | |||
sa.commit() | ||||
r2147 | # clear cache keys | |||
log.debug("Clearing cache keys now...") | ||||
CacheInvalidation.clear_cache() | ||||
sa.commit() | ||||
r1039 | return added, removed | |||
r1224 | ||||
r1982 | ||||
# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    """
    Register beaker cache regions from a paste/pylons settings dict.

    Keys prefixed with ``beaker.cache.`` or ``cache.`` are collected, then
    for each name listed under ``regions`` a region config is assembled and
    stored in ``beaker.cache.cache_regions``.
    """
    cache_settings = {'regions': None}

    # strip the beaker prefixes, keeping the bare option names
    for key in settings.keys():
        for prefix in ('beaker.cache.', 'cache.'):
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()

    regions = cache_settings['regions']
    if not regions:
        return

    for region in regions.split(','):
        region = region.strip()

        # options namespaced by the region name, e.g. "short_term.expire"
        region_settings = {}
        for key, value in cache_settings.items():
            if key.startswith(region):
                region_settings[key.split('.')[1]] = value

        region_settings['expire'] = int(region_settings.get('expire', 60))
        # fall back to the global lock/data dirs when not set per-region
        region_settings.setdefault('lock_dir',
                                   cache_settings.get('lock_dir'))
        region_settings.setdefault('data_dir',
                                   cache_settings.get('data_dir'))
        if 'type' not in region_settings:
            region_settings['type'] = cache_settings.get('type', 'memory')

        beaker.cache.cache_regions[region] = region_settings
r1224 | ||||
def load_rcextensions(root_path):
    """
    Load the optional ``rcextensions`` package found under `root_path` and
    let it override/extend rhodecode's global extension settings.

    :param root_path: directory expected to contain
        ``rcextensions/__init__.py``; silently does nothing when absent
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        # expose the loaded module globally on the rhodecode package
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        # ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param repo_location: location of repositories to index
    :param config: test config
    :param full_index: if True, (re)build the whole index from scratch
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    # no-op assignment kept as-is from the original source
    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # only a single indexing process may run at a time
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        # another indexer holds the lock -- skip silently
        pass
r1224 | ||||
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir

    :param repos_test_path: directory that will hold the test repositories
    :param config: test config (pylons-style dict with app_conf etc.)
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session.commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    # LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()
r1224 | ||||
#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    # paster Command configuration: exactly one .ini config file is required
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True
r837 | def notify_msg(self, msg, log=False): | |||
"""Make a notification to user, additionally if logger is passed | ||||
it logs this action using given logger | ||||
r1203 | ||||
r837 | :param msg: message that will be printed to user | |||
:param log: logging instance, to use to additionally log this message | ||||
r1203 | ||||
r837 | """ | |||
if log and isinstance(log, logging): | ||||
log(msg) | ||||
    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.

        :param args: command-line arguments; args[0] must be the config file
        :raises BadCommand: when fewer than ``min_args`` arguments are given
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        # delegate to paste's Command.run with the config file stripped off
        return super(BasePasterCommand, self).run(args[1:])
    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.

        :raises NotImplementedError: always; subclasses must override
        """
        raise NotImplementedError("Abstract Method.")
    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.

        :param conf: path to a paster .ini config file
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        # NOTE(review): relies on ``paste.deploy`` being importable although
        # only ``import paste`` is visible at the top of this file --
        # presumably another module imports paste.deploy first; verify
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)