##// END OF EJS Templates
vcsserver: modernize code for python3
vcsserver: modernize code for python3

File last commit:

r1130:d68a72e0 python3
r1130:d68a72e0 python3
Show More
utils.py
247 lines | 8.9 KiB | text/x-python | PythonLexer
caches: added redis backend as an option
r733 # RhodeCode VCSServer provides access to different vcs backends via network.
source-code: updated copyrights to 2023
r1126 # Copyright (C) 2014-2023 RhodeCode GmbH
caches: added redis backend as an option
r733 #
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
ruff: code-cleanups
r1100 import functools
import logging
caches: added redis backend as an option
r733 import os
caches: synced with CE code
r1113 import threading
caches: allow regional per repo caches, and invalidate caches via a remote call.
r961 import time
ruff: code-cleanups
r1100
py3: fixes for python3
r1045 import decorator
caches: added redis backend as an option
r733 from dogpile.cache import CacheRegion
caches: synced with CE changes
r1127
from vcsserver.utils import sha1
packages: move the str utils to it's own module
r1060 from vcsserver.str_utils import safe_bytes
caches: synced with CE code
r1113 from vcsserver.type_utils import str2bool
caches: synced with CE changes
r1127
from . import region_meta
caches: added redis backend as an option
r733
log = logging.getLogger(__name__)
class RhodeCodeCacheRegion(CacheRegion):
    """
    Thin subclass of dogpile's CacheRegion that adds a *conditional*
    caching decorator: when the condition is falsy, the wrapped function is
    called directly without touching any dogpile machinery at all.
    """

    def __repr__(self):
        return f'{self.__class__}(name={self.name})'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't met. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        # resolve whether expiration_time must be called per-invocation
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            # fall back to the namespace stamped on the region by
            # get_or_create_region() (see `_default_namespace` there)
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
            # fast path: bypass dogpile entirely when caching is disabled
            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # drop the cached entry for these exact arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store `value` under the key these arguments would generate
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch whatever is cached for these arguments (no compute)
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers as attributes of the wrapped
            # function, mirroring dogpile's cache_on_arguments API
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
caches: added redis backend as an option
r733
def make_region(*arg, **kw):
    """Factory: build a :class:`RhodeCodeCacheRegion`, forwarding all arguments."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related options from a flat settings dict.

    :param settings: mapping of option name -> value (e.g. parsed ini settings)
    :param prefixes: list of key prefixes to match; matching keys are returned
        with the prefix stripped. Defaults to no prefixes (empty result).
    :return: dict of stripped-name -> value; string values are whitespace-trimmed
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # NOTE: slice off the prefix instead of `key.split(prefix)[1]`;
                # split() yields an empty segment when the remainder itself
                # contains the prefix (e.g. 'cache.cache.x'), which silently
                # corrupted the option name
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    # stringify every argument, join with '_' and hash the resulting bytes
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
caches: new cache + archive cache implementation
def custom_key_generator(backend, namespace, fn):
    """
    Return a dogpile key-generator for *fn*, prefixing every key with the
    backend's key_prefix and the given namespace.
    """
    func_name = fn.__name__

    def generate_key(*args):
        # fall back to placeholder prefixes when backend/namespace give none
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{func_name}_{arg_key}"

    return generate_key
caches: don't use key_manglers instead prefix keys based on backend.
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    # partial application binds `backend`; the result is called by dogpile
    # as key_generator(namespace, fn), exactly like the plain wrapper did
    return functools.partial(custom_key_generator, backend)
caches: synced with CE changes
def get_or_create_region(region_name, region_namespace: str | None = None, use_async_runner=False):
    """
    Look up a configured dogpile cache region by name; for file-namespace
    backends, lazily create (and register) a dedicated per-namespace region
    backed by its own cache_db file.

    :param region_name: name of a region present in region_meta.dogpile_cache_regions
    :param region_namespace: required for FileNamespaceBackend regions; used as
        registry key and as part of the per-namespace db filename
    :param use_async_runner: attach the package-level async_creation_runner
        to the returned region
    :raises OSError: when `region_name` is not a configured region
    :raises ValueError: when a FileNamespaceBackend region is requested
        without a region_namespace
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        # NOTE: per-namespace regions are registered under region_namespace
        # (not region_uid_name) — a previously created one is reused here
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    # remember the namespace so conditional_cache_on_arguments() can default to it
    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
caches: new cache + archive cache implementation
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
    """
    Clear a cache namespace either by invalidating the region or by deleting
    all keys with the given prefix.

    :param cache_region: region name or an already-resolved region object
    :param cache_namespace_uid: key prefix identifying the namespace
    :param method: CLEAR_INVALIDATE or CLEAR_DELETE
    :return: number of deleted keys for CLEAR_DELETE, otherwise None
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    # accept a region name and resolve it lazily
    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s with method=%s', cache_region, method)

    num_affected_keys = None

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method's default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)
    elif method == CLEAR_DELETE:
        matching_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
        num_affected_keys = len(matching_keys)
        if num_affected_keys:
            cache_region.delete_multi(matching_keys)

    return num_affected_keys