#!/usr/bin/env python
# encoding: utf-8
# Utilities for hg app
# Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of the license.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Created on April 18, 2010
Utilities for hg app
@author: marcink
"""
from beaker.cache import cache_region
from mercurial import ui, config, hg
from mercurial.error import RepoError
from pylons_app.model import meta
from pylons_app.model.db import Repository, User, HgAppUi, HgAppSettings
from vcs.backends.base import BaseChangeset
from vcs.utils.lazy import LazyProperty
import logging
import os

log = logging.getLogger(__name__)

def get_repo_slug(request): | ||||
return request.environ['pylons.routes_dict'].get('repo_name') | ||||
def is_mercurial(environ): | ||||
""" | ||||
Returns True if request's target is mercurial server - header | ||||
``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. | ||||
""" | ||||
http_accept = environ.get('HTTP_ACCEPT') | ||||
if http_accept and http_accept.startswith('application/mercurial'): | ||||
return True | ||||
return False | ||||
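
# Illustrative example (not part of the original module, values are made up):
# a WSGI environ produced by an hg client typically carries an Accept header
# such as 'application/mercurial-0.1', so:
#
#   is_mercurial({'HTTP_ACCEPT': 'application/mercurial-0.1'})  # -> True
#   is_mercurial({'HTTP_ACCEPT': 'text/html'})                  # -> False
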
def check_repo_dir(paths): | ||||
repos_path = paths[0][1].split('/') | ||||
if repos_path[-1] in ['*', '**']: | ||||
repos_path = repos_path[:-1] | ||||
if repos_path[0] != '/': | ||||
repos_path[0] = '/' | ||||
if not os.path.isdir(os.path.join(*repos_path)): | ||||
raise Exception('Not a valid repository in %s' % paths[0][1]) | ||||
def check_repo_fast(repo_name, base_path):
    if os.path.isdir(os.path.join(base_path, repo_name)):
        return False
    return True

def check_repo(repo_name, base_path, verify=True):

    repo_path = os.path.join(base_path, repo_name)

    try:
        if not check_repo_fast(repo_name, base_path):
            return False
        r = hg.repository(ui.ui(), repo_path)
        if verify:
            hg.verify(r)
        #here we know that the repo exists, it was verified
        log.info('%s repo is already created', repo_name)
        return False
    except RepoError:
        #it means that there is no valid repo there...
        log.info('%s repo is free for creation', repo_name)
        return True

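# Illustrative usage sketch (paths and names are hypothetical): check_repo_fast()
# only tests whether a directory of that name exists, while check_repo() also asks
# mercurial whether the path holds a valid (and optionally verified) repository.
# Both return True when the name is free for creation:
#
#   check_repo('my_new_repo', '/srv/repos')    # True  -> name is free
#   check_repo('existing_repo', '/srv/repos')  # False -> a repo already lives there
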
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
while True: | ||||
ok = raw_input(prompt) | ||||
if ok in ('y', 'ye', 'yes'): return True | ||||
if ok in ('n', 'no', 'nop', 'nope'): return False | ||||
retries = retries - 1 | ||||
if retries < 0: raise IOError | ||||
print complaint | ||||
@cache_region('super_short_term', 'cached_hg_ui')
def get_hg_ui_cached():
    try:
        sa = meta.Session
        ret = sa.query(HgAppUi).all()
    finally:
        meta.Session.remove()
    return ret

def get_hg_settings():
    try:
        sa = meta.Session
        ret = sa.query(HgAppSettings).all()
    finally:
        meta.Session.remove()
    if not ret:
        raise Exception('Could not get application settings !')
    settings = {}
    for each in ret:
        settings['hg_app_' + each.app_settings_name] = each.app_settings_value
    return settings
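
# Illustrative shape of the returned dict (derived from the loop above, row
# contents are hypothetical): every HgAppSettings row is exposed under an
# 'hg_app_' prefix, e.g. a row with app_settings_name == 'title' becomes
# settings['hg_app_title'] holding its app_settings_value.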

def get_hg_ui_settings():
    try:
        sa = meta.Session
        ret = sa.query(HgAppUi).all()
    finally:
        meta.Session.remove()
    if not ret:
        raise Exception('Could not get application ui settings !')
    settings = {}
    for each in ret:
        k = each.ui_key
        v = each.ui_value
        if k == '/':
            k = 'root_path'
        if k.find('.') != -1:
            k = k.replace('.', '_')
        if each.ui_section == 'hooks':
            v = each.ui_active
        settings[each.ui_section + '_' + k] = v

    return settings
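
# Illustrative key naming (derived from the mapping above, row contents are
# hypothetical): a row with ui_section == 'paths' and ui_key == '/' ends up as
# settings['paths_root_path'], while a hooks row with a dotted key such as
# 'changegroup.update' becomes settings['hooks_changegroup_update'] and holds
# the ui_active flag instead of ui_value.
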
#list of config sections, taken from the mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]

def make_ui(read_from='file', path=None, checkpaths=True):
    """
    A function that will read python rc files or the database
    and make a mercurial ui object from the read options

    @param path: path to mercurial config file
    @param checkpaths: check the path
    @param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.warning('Unable to read config file %s' % path)
            return False
        log.debug('reading hgrc from %s', path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                baseui.setconfig(section, k, v)
                log.debug('settings ui from file[%s]%s:%s', section, k, v)
        if checkpaths:
            check_repo_dir(cfg.items('paths'))

    elif read_from == 'db':
        hg_ui = get_hg_ui_cached()
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

    return baseui

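# Illustrative usage (the path is hypothetical): build the ui either from an
# hgrc-style file or from the HgAppUi table, e.g.
#
#   baseui = make_ui(read_from='file', path='/srv/repos/hgrc', checkpaths=False)
#   baseui = make_ui(read_from='db')
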
def set_hg_app_config(config):
    hgsettings = get_hg_settings()

    for k, v in hgsettings.items():
        config[k] = v

def invalidate_cache(name, *args):
    """Invalidates given name cache"""
    from beaker.cache import region_invalidate
    log.info('INVALIDATING CACHE FOR %s', name)

    """propagate our arguments to make sure invalidation works. The first
    argument has to be the name of the cached function given to the cache
    decorator, without that the invalidation would not work"""
    tmp = [name]
    tmp.extend(args)
    args = tuple(tmp)

    if name == 'cached_repo_list':
        from pylons_app.model.hg_model import _get_repos_cached
        region_invalidate(_get_repos_cached, None, *args)
    if name == 'full_changelog':
        from pylons_app.model.hg_model import _full_changelog_cached
        region_invalidate(_full_changelog_cached, None, *args)
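
# Illustrative usage (the repo name is hypothetical): the extra positional
# arguments should match those the cached function was originally called with,
# e.g.
#
#   invalidate_cache('cached_repo_list')
#   invalidate_cache('full_changelog', 'my_repo')
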
class EmptyChangeset(BaseChangeset):
    revision = -1
    message = ''
    author = ''

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """
        return '0' * 40

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all found repositories into db
    """
    from pylons_app.model.repo_model import RepoModel
    sa = meta.Session
    user = sa.query(User).filter(User.admin == True).first()
    rm = RepoModel()
    for name, repo in initial_repo_list.items():
        if not sa.query(Repository).filter(Repository.repo_name == name).scalar():
            log.info('repository %s not found creating default', name)
            form_data = {
                         'repo_name':name,
                         'description':repo.description if repo.description != 'unknown' else \
                                        'auto description for %s' % name,
                         'private':False
                         }
            rm.create(form_data, user, just_db=True)

    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                sa.delete(repo)
                sa.commit()

    meta.Session.remove()

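# Illustrative usage sketch (the scanned dict is an assumption, not produced by
# this module): initial_repo_list is expected to map repository names to the
# repository objects found on disk, e.g. something like
#
#   repo2db_mapper({'my_repo': my_repo_obj}, remove_obsolete=True)
#
# which creates missing db entries and drops db rows for repos no longer on disk.
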
from UserDict import DictMixin | ||||
class OrderedDict(dict, DictMixin): | ||||
def __init__(self, *args, **kwds): | ||||
if len(args) > 1: | ||||
raise TypeError('expected at most 1 arguments, got %d' % len(args)) | ||||
try: | ||||
self.__end | ||||
except AttributeError: | ||||
self.clear() | ||||
self.update(*args, **kwds) | ||||
def clear(self): | ||||
self.__end = end = [] | ||||
end += [None, end, end] # sentinel node for doubly linked list | ||||
self.__map = {} # key --> [key, prev, next] | ||||
dict.clear(self) | ||||
def __setitem__(self, key, value): | ||||
if key not in self: | ||||
end = self.__end | ||||
curr = end[1] | ||||
curr[2] = end[1] = self.__map[key] = [key, curr, end] | ||||
dict.__setitem__(self, key, value) | ||||
def __delitem__(self, key): | ||||
dict.__delitem__(self, key) | ||||
key, prev, next = self.__map.pop(key) | ||||
prev[2] = next | ||||
next[1] = prev | ||||
def __iter__(self): | ||||
end = self.__end | ||||
curr = end[2] | ||||
while curr is not end: | ||||
yield curr[0] | ||||
curr = curr[2] | ||||
def __reversed__(self): | ||||
end = self.__end | ||||
curr = end[1] | ||||
while curr is not end: | ||||
yield curr[0] | ||||
curr = curr[1] | ||||
def popitem(self, last=True): | ||||
if not self: | ||||
raise KeyError('dictionary is empty') | ||||
if last: | ||||
key = reversed(self).next() | ||||
else: | ||||
key = iter(self).next() | ||||
value = self.pop(key) | ||||
return key, value | ||||
def __reduce__(self): | ||||
items = [[k, self[k]] for k in self] | ||||
tmp = self.__map, self.__end | ||||
del self.__map, self.__end | ||||
inst_dict = vars(self).copy() | ||||
self.__map, self.__end = tmp | ||||
if inst_dict: | ||||
return (self.__class__, (items,), inst_dict) | ||||
return self.__class__, (items,) | ||||
def keys(self): | ||||
return list(self) | ||||
setdefault = DictMixin.setdefault | ||||
update = DictMixin.update | ||||
pop = DictMixin.pop | ||||
values = DictMixin.values | ||||
items = DictMixin.items | ||||
iterkeys = DictMixin.iterkeys | ||||
itervalues = DictMixin.itervalues | ||||
iteritems = DictMixin.iteritems | ||||
def __repr__(self): | ||||
if not self: | ||||
return '%s()' % (self.__class__.__name__,) | ||||
return '%s(%r)' % (self.__class__.__name__, self.items()) | ||||
def copy(self): | ||||
return self.__class__(self) | ||||
@classmethod | ||||
def fromkeys(cls, iterable, value=None): | ||||
d = cls() | ||||
for key in iterable: | ||||
d[key] = value | ||||
return d | ||||
def __eq__(self, other): | ||||
if isinstance(other, OrderedDict): | ||||
return len(self) == len(other) and self.items() == other.items() | ||||
return dict.__eq__(self, other) | ||||
def __ne__(self, other): | ||||
return not self == other | ||||
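
# Illustrative usage (values are arbitrary): unlike a plain dict on older
# Pythons, this class yields keys in insertion order, e.g.
#
#   d = OrderedDict([('b', 1), ('a', 2)])
#   d.keys()  # -> ['b', 'a']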


#===============================================================================
# TEST FUNCTIONS
#===============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index
    @param repo_location: path to the repositories that should be indexed
    @param full_index: if True build a full index instead of an incremental one
    """
    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
    from pylons_app.lib.pidlock import DaemonLock, LockHeld
    from pylons_app.lib.indexers import IDX_LOCATION
    import shutil

    if os.path.exists(IDX_LOCATION):
        shutil.rmtree(IDX_LOCATION)

    try:
        l = DaemonLock()
        WhooshIndexingDaemon(repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass

def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    installs a test repository into tmp dir
    """
    from pylons_app.lib.db_manage import DbManage
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    log.debug('making test db')
    dbname = config['sqlalchemy.db1.url'].split('/')[-1]
    dbmanage = DbManage(log_sql=True, dbname=dbname, tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repo')
    if os.path.isdir('/tmp/vcs_test'):
        shutil.rmtree('/tmp/vcs_test')

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
    tar.extractall('/tmp')
    tar.close()