# utils.py
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
r1100 | import functools | |||
import logging | ||||
r733 | import os | |||
r1113 | import threading | |||
r961 | import time | |||
r1100 | ||||
r1045 | import decorator | |||
r733 | from dogpile.cache import CacheRegion | |||
r1127 | ||||
from vcsserver.utils import sha1 | ||||
r1060 | from vcsserver.str_utils import safe_bytes | |||
r1135 | from vcsserver.type_utils import str2bool # noqa :required by imports from .utils | |||
r1127 | ||||
from . import region_meta | ||||
r733 | ||||
log = logging.getLogger(__name__) | ||||
class RhodeCodeCacheRegion(CacheRegion): | ||||
r1113 | def __repr__(self): | |||
return f'{self.__class__}(name={self.name})' | ||||
r733 | def conditional_cache_on_arguments( | |||
self, namespace=None, | ||||
expiration_time=None, | ||||
should_cache_fn=None, | ||||
r1042 | to_str=str, | |||
r733 | function_key_generator=None, | |||
condition=True): | ||||
""" | ||||
Custom conditional decorator, that will not touch any dogpile internals if | ||||
r1100 | condition isn't meet. This works a bit different from should_cache_fn | |||
r733 | And it's faster in cases we don't ever want to compute cached values | |||
""" | ||||
r1042 | expiration_time_is_callable = callable(expiration_time) | |||
r1121 | if not namespace: | |||
namespace = getattr(self, '_default_namespace', None) | ||||
r733 | ||||
if function_key_generator is None: | ||||
function_key_generator = self.function_key_generator | ||||
r1121 | def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw): | |||
r751 | ||||
if not condition: | ||||
r1044 | log.debug('Calling un-cached method:%s', user_func.__name__) | |||
r958 | start = time.time() | |||
result = user_func(*arg, **kw) | ||||
total = time.time() - start | ||||
r1044 | log.debug('un-cached method:%s took %.4fs', user_func.__name__, total) | |||
r958 | return result | |||
r751 | ||||
r1121 | key = func_key_generator(*arg, **kw) | |||
r751 | ||||
timeout = expiration_time() if expiration_time_is_callable \ | ||||
else expiration_time | ||||
r1044 | log.debug('Calling cached method:`%s`', user_func.__name__) | |||
r751 | return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw)) | |||
def cache_decorator(user_func): | ||||
r1042 | if to_str is str: | |||
r733 | # backwards compatible | |||
r751 | key_generator = function_key_generator(namespace, user_func) | |||
r733 | else: | |||
r751 | key_generator = function_key_generator(namespace, user_func, to_str=to_str) | |||
r733 | ||||
r751 | def refresh(*arg, **kw): | |||
""" | ||||
Like invalidate, but regenerates the value instead | ||||
""" | ||||
key = key_generator(*arg, **kw) | ||||
value = user_func(*arg, **kw) | ||||
self.set(key, value) | ||||
return value | ||||
r733 | ||||
def invalidate(*arg, **kw): | ||||
key = key_generator(*arg, **kw) | ||||
self.delete(key) | ||||
def set_(value, *arg, **kw): | ||||
key = key_generator(*arg, **kw) | ||||
self.set(key, value) | ||||
def get(*arg, **kw): | ||||
key = key_generator(*arg, **kw) | ||||
return self.get(key) | ||||
r751 | user_func.set = set_ | |||
user_func.invalidate = invalidate | ||||
user_func.get = get | ||||
user_func.refresh = refresh | ||||
user_func.key_generator = key_generator | ||||
user_func.original = user_func | ||||
r733 | ||||
r751 | # Use `decorate` to preserve the signature of :param:`user_func`. | |||
r961 | return decorator.decorate(user_func, functools.partial( | |||
r751 | get_or_create_for_user_func, key_generator)) | |||
r733 | ||||
r751 | return cache_decorator | |||
r733 | ||||
def make_region(*args, **kwargs):
    """Factory for :class:`RhodeCodeCacheRegion`; mirrors dogpile's ``make_region``."""
    return RhodeCodeCacheRegion(*args, **kwargs)
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related entries from a settings mapping.

    For every key starting with one of ``prefixes``, the prefix is stripped
    and the remainder (whitespace-trimmed) becomes the key in the returned
    dict. String values are also whitespace-trimmed; other values are kept
    as-is.

    Fix: use ``str.removeprefix`` instead of ``key.split(prefix)[1]`` — the
    split form truncated the name whenever the prefix substring re-occurred
    later in the key (e.g. ``'p.a.p.b'`` with prefix ``'p.'`` yielded
    ``'a.'`` instead of ``'a.p.b'``).

    :param settings: mapping of configuration keys to values.
    :param prefixes: iterable of key prefixes to match; ``None`` matches nothing.
    :return: dict of stripped-key -> value.
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                name = key.removeprefix(prefix).strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined_args = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined_args))
r733 | ||||
r1121 | def custom_key_generator(backend, namespace, fn): | |||
func_name = fn.__name__ | ||||
def generate_key(*args): | ||||
backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix' | ||||
namespace_pref = namespace or 'default_namespace' | ||||
arg_key = compute_key_from_params(*args) | ||||
final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}" | ||||
return final_key | ||||
return generate_key | ||||
r734 | def backend_key_generator(backend): | |||
""" | ||||
Special wrapper that also sends over the backend to the key generator | ||||
""" | ||||
def wrapper(namespace, fn): | ||||
r1121 | return custom_key_generator(backend, namespace, fn) | |||
r734 | return wrapper | |||
r1127 | def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False): | |||
from .backends import FileNamespaceBackend | ||||
from . import async_creation_runner | ||||
r1113 | ||||
r961 | region_obj = region_meta.dogpile_cache_regions.get(region_name) | |||
if not region_obj: | ||||
r1074 | reg_keys = list(region_meta.dogpile_cache_regions.keys()) | |||
r1130 | raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.') | |||
r961 | ||||
r1074 | region_uid_name = f'{region_name}:{region_namespace}' | |||
r1113 | ||||
r1135 | # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region | |||
r961 | if isinstance(region_obj.actual_backend, FileNamespaceBackend): | |||
r1113 | if not region_namespace: | |||
raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param') | ||||
r961 | region_exist = region_meta.dogpile_cache_regions.get(region_namespace) | |||
if region_exist: | ||||
log.debug('Using already configured region: %s', region_namespace) | ||||
return region_exist | ||||
r1113 | ||||
r961 | expiration_time = region_obj.expiration_time | |||
r1113 | cache_dir = region_meta.dogpile_config_defaults['cache_dir'] | |||
namespace_cache_dir = cache_dir | ||||
# we default the namespace_cache_dir to our default cache dir. | ||||
r1135 | # however, if this backend is configured with filename= param, we prioritize that | |||
r1113 | # so all caches within that particular region, even those namespaced end up in the same path | |||
if region_obj.actual_backend.filename: | ||||
namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename) | ||||
if not os.path.isdir(namespace_cache_dir): | ||||
os.makedirs(namespace_cache_dir) | ||||
r961 | new_region = make_region( | |||
name=region_uid_name, | ||||
function_key_generator=backend_key_generator(region_obj.actual_backend) | ||||
) | ||||
r1113 | ||||
r961 | namespace_filename = os.path.join( | |||
r1113 | namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db") | |||
r961 | # special type that allows 1db per namespace | |||
new_region.configure( | ||||
backend='dogpile.cache.rc.file_namespace', | ||||
expiration_time=expiration_time, | ||||
arguments={"filename": namespace_filename} | ||||
) | ||||
# create and save in region caches | ||||
log.debug('configuring new region: %s', region_uid_name) | ||||
region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region | ||||
r1121 | region_obj._default_namespace = region_namespace | |||
r1127 | if use_async_runner: | |||
region_obj.async_creation_runner = async_creation_runner | ||||
r961 | return region_obj | |||
r1121 | def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str): | |||
from . import CLEAR_DELETE, CLEAR_INVALIDATE | ||||
r1113 | if not isinstance(cache_region, RhodeCodeCacheRegion): | |||
cache_region = get_or_create_region(cache_region, cache_namespace_uid) | ||||
r1121 | log.debug('clearing cache region: %s with method=%s', cache_region, method) | |||
r1113 | ||||
r1121 | num_affected_keys = None | |||
if method == CLEAR_INVALIDATE: | ||||
r1113 | # NOTE: The CacheRegion.invalidate() method’s default mode of | |||
# operation is to set a timestamp local to this CacheRegion in this Python process only. | ||||
# It does not impact other Python processes or regions as the timestamp is only stored locally in memory. | ||||
r1121 | cache_region.invalidate(hard=True) | |||
if method == CLEAR_DELETE: | ||||
cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid) | ||||
num_affected_keys = len(cache_keys) | ||||
if num_affected_keys: | ||||
r1113 | cache_region.delete_multi(cache_keys) | |||
r1121 | ||||
return num_affected_keys | ||||